match: resolve filesets against the passed `cwd`, not the current one...
Matt Harbison
r44461:e685fac5 default
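
This changeset threads a `cwd` argument through the matcher APIs so that filesets are resolved against a directory chosen by the caller rather than whatever the process's current working directory happens to be: `basectx.matchfileset()` now requires the anchor directory as its first argument, and `basectx.match()` grows an optional `cwd` keyword that still defaults to `repo.getcwd()`. A minimal caller-side sketch of the difference (the `repo` object and the fileset expression here are illustrative assumptions, not part of the diff):

    # Sketch of the API change from a caller's point of view.
    ctx = repo[b'tip']
    # Before: relative patterns and predicates in the fileset were resolved
    # against the process's current directory:
    #     m = ctx.matchfileset(b"size('<5M')")
    # After: the caller states the anchor directory explicitly; long-running,
    # non-interactive callers such as hgweb pass the repository root:
    m = ctx.matchfileset(repo.root, b"size('<5M')")
    if m(b'mercurial/context.py'):
        pass  # the file is selected by the fileset
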
hgext/highlight/__init__.py
@@ -1,109 +1,109 @@
# highlight - syntax highlighting in hgweb, based on Pygments
#
# Copyright 2008, 2009 Patrick Mezard <pmezard@gmail.com> and others
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
#
# The original module was split in an interface and an implementation
# file to defer pygments loading and speedup extension setup.

"""syntax highlighting for hgweb (requires Pygments)

It depends on the Pygments syntax highlighting library:
http://pygments.org/

There are the following configuration options::

  [web]
  pygments_style = <style> (default: colorful)
  highlightfiles = <fileset> (default: size('<5M'))
  highlightonlymatchfilename = <bool> (default False)

``highlightonlymatchfilename`` will only highlight files if their type could
be identified by their filename. When this is not enabled (the default),
Pygments will try very hard to identify the file type from content and any
match (even matches with a low confidence score) will be used.
"""

from __future__ import absolute_import

from . import highlight
from mercurial.hgweb import (
    webcommands,
    webutil,
)

from mercurial import (
    extensions,
    pycompat,
)

# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'


def pygmentize(web, field, fctx, tmpl):
    style = web.config(b'web', b'pygments_style', b'colorful')
    expr = web.config(b'web', b'highlightfiles', b"size('<5M')")
    filenameonly = web.configbool(b'web', b'highlightonlymatchfilename', False)

    ctx = fctx.changectx()
-    m = ctx.matchfileset(expr)
+    m = ctx.matchfileset(fctx.repo().root, expr)
    if m(fctx.path()):
        highlight.pygmentize(
            field, fctx, style, tmpl, guessfilenameonly=filenameonly
        )


def filerevision_highlight(orig, web, fctx):
    mt = web.res.headers[b'Content-Type']
    # only pygmentize for mimetype containing 'html' so we both match
    # 'text/html' and possibly 'application/xhtml+xml' in the future
    # so that we don't have to touch the extension when the mimetype
    # for a template changes; also hgweb optimizes the case that a
    # raw file is sent using rawfile() and doesn't call us, so we
    # can't clash with the file's content-type here in case we
    # pygmentize a html file
    if b'html' in mt:
        pygmentize(web, b'fileline', fctx, web.tmpl)

    return orig(web, fctx)


def annotate_highlight(orig, web):
    mt = web.res.headers[b'Content-Type']
    if b'html' in mt:
        fctx = webutil.filectx(web.repo, web.req)
        pygmentize(web, b'annotateline', fctx, web.tmpl)

    return orig(web)


def generate_css(web):
    pg_style = web.config(b'web', b'pygments_style', b'colorful')
    fmter = highlight.HtmlFormatter(style=pycompat.sysstr(pg_style))
    web.res.headers[b'Content-Type'] = b'text/css'
    style_defs = fmter.get_style_defs(pycompat.sysstr(b''))
    web.res.setbodybytes(
        b''.join(
            [
                b'/* pygments_style = %s */\n\n' % pg_style,
                pycompat.bytestr(style_defs),
            ]
        )
    )
    return web.res.sendresponse()


def extsetup(ui):
    # monkeypatch in the new version
    extensions.wrapfunction(
        webcommands, b'_filerevision', filerevision_highlight
    )
    extensions.wrapfunction(webcommands, b'annotate', annotate_highlight)
    webcommands.highlightcss = generate_css
    webcommands.__all__.append(b'highlightcss')
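
The hgweb highlight hook above is a caller updated to the new signature: because an hgweb server can be launched from any directory, anchoring the `web.highlightfiles` fileset at `fctx.repo().root` makes the configuration select the same files for every request. A condensed sketch of the resulting check (the helper name is hypothetical; the calls mirror `pygmentize()` above):

    def should_highlight(web, fctx):
        # Hypothetical helper condensing the check done in pygmentize().
        expr = web.config(b'web', b'highlightfiles', b"size('<5M')")
        # Resolve the fileset against the repository root rather than the
        # server process's cwd, so relative patterns are stable.
        m = fctx.changectx().matchfileset(fctx.repo().root, expr)
        return m(fctx.path())
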
@@ -1,3021 +1,3027 b''
1 # context.py - changeset and file context objects for mercurial
1 # context.py - changeset and file context objects for mercurial
2 #
2 #
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import filecmp
11 import filecmp
12 import os
12 import os
13 import stat
13 import stat
14
14
15 from .i18n import _
15 from .i18n import _
16 from .node import (
16 from .node import (
17 addednodeid,
17 addednodeid,
18 hex,
18 hex,
19 modifiednodeid,
19 modifiednodeid,
20 nullid,
20 nullid,
21 nullrev,
21 nullrev,
22 short,
22 short,
23 wdirfilenodeids,
23 wdirfilenodeids,
24 wdirhex,
24 wdirhex,
25 )
25 )
26 from .pycompat import (
26 from .pycompat import (
27 getattr,
27 getattr,
28 open,
28 open,
29 )
29 )
30 from . import (
30 from . import (
31 copies,
31 copies,
32 dagop,
32 dagop,
33 encoding,
33 encoding,
34 error,
34 error,
35 fileset,
35 fileset,
36 match as matchmod,
36 match as matchmod,
37 obsolete as obsmod,
37 obsolete as obsmod,
38 patch,
38 patch,
39 pathutil,
39 pathutil,
40 phases,
40 phases,
41 pycompat,
41 pycompat,
42 repoview,
42 repoview,
43 scmutil,
43 scmutil,
44 sparse,
44 sparse,
45 subrepo,
45 subrepo,
46 subrepoutil,
46 subrepoutil,
47 util,
47 util,
48 )
48 )
49 from .utils import (
49 from .utils import (
50 dateutil,
50 dateutil,
51 stringutil,
51 stringutil,
52 )
52 )
53
53
54 propertycache = util.propertycache
54 propertycache = util.propertycache
55
55
56
56
57 class basectx(object):
57 class basectx(object):
58 """A basectx object represents the common logic for its children:
58 """A basectx object represents the common logic for its children:
59 changectx: read-only context that is already present in the repo,
59 changectx: read-only context that is already present in the repo,
60 workingctx: a context that represents the working directory and can
60 workingctx: a context that represents the working directory and can
61 be committed,
61 be committed,
62 memctx: a context that represents changes in-memory and can also
62 memctx: a context that represents changes in-memory and can also
63 be committed."""
63 be committed."""
64
64
65 def __init__(self, repo):
65 def __init__(self, repo):
66 self._repo = repo
66 self._repo = repo
67
67
68 def __bytes__(self):
68 def __bytes__(self):
69 return short(self.node())
69 return short(self.node())
70
70
71 __str__ = encoding.strmethod(__bytes__)
71 __str__ = encoding.strmethod(__bytes__)
72
72
73 def __repr__(self):
73 def __repr__(self):
74 return "<%s %s>" % (type(self).__name__, str(self))
74 return "<%s %s>" % (type(self).__name__, str(self))
75
75
76 def __eq__(self, other):
76 def __eq__(self, other):
77 try:
77 try:
78 return type(self) == type(other) and self._rev == other._rev
78 return type(self) == type(other) and self._rev == other._rev
79 except AttributeError:
79 except AttributeError:
80 return False
80 return False
81
81
82 def __ne__(self, other):
82 def __ne__(self, other):
83 return not (self == other)
83 return not (self == other)
84
84
85 def __contains__(self, key):
85 def __contains__(self, key):
86 return key in self._manifest
86 return key in self._manifest
87
87
88 def __getitem__(self, key):
88 def __getitem__(self, key):
89 return self.filectx(key)
89 return self.filectx(key)
90
90
91 def __iter__(self):
91 def __iter__(self):
92 return iter(self._manifest)
92 return iter(self._manifest)
93
93
94 def _buildstatusmanifest(self, status):
94 def _buildstatusmanifest(self, status):
95 """Builds a manifest that includes the given status results, if this is
95 """Builds a manifest that includes the given status results, if this is
96 a working copy context. For non-working copy contexts, it just returns
96 a working copy context. For non-working copy contexts, it just returns
97 the normal manifest."""
97 the normal manifest."""
98 return self.manifest()
98 return self.manifest()
99
99
100 def _matchstatus(self, other, match):
100 def _matchstatus(self, other, match):
101 """This internal method provides a way for child objects to override the
101 """This internal method provides a way for child objects to override the
102 match operator.
102 match operator.
103 """
103 """
104 return match
104 return match
105
105
106 def _buildstatus(
106 def _buildstatus(
107 self, other, s, match, listignored, listclean, listunknown
107 self, other, s, match, listignored, listclean, listunknown
108 ):
108 ):
109 """build a status with respect to another context"""
109 """build a status with respect to another context"""
110 # Load earliest manifest first for caching reasons. More specifically,
110 # Load earliest manifest first for caching reasons. More specifically,
111 # if you have revisions 1000 and 1001, 1001 is probably stored as a
111 # if you have revisions 1000 and 1001, 1001 is probably stored as a
112 # delta against 1000. Thus, if you read 1000 first, we'll reconstruct
112 # delta against 1000. Thus, if you read 1000 first, we'll reconstruct
113 # 1000 and cache it so that when you read 1001, we just need to apply a
113 # 1000 and cache it so that when you read 1001, we just need to apply a
114 # delta to what's in the cache. So that's one full reconstruction + one
114 # delta to what's in the cache. So that's one full reconstruction + one
115 # delta application.
115 # delta application.
116 mf2 = None
116 mf2 = None
117 if self.rev() is not None and self.rev() < other.rev():
117 if self.rev() is not None and self.rev() < other.rev():
118 mf2 = self._buildstatusmanifest(s)
118 mf2 = self._buildstatusmanifest(s)
119 mf1 = other._buildstatusmanifest(s)
119 mf1 = other._buildstatusmanifest(s)
120 if mf2 is None:
120 if mf2 is None:
121 mf2 = self._buildstatusmanifest(s)
121 mf2 = self._buildstatusmanifest(s)
122
122
123 modified, added = [], []
123 modified, added = [], []
124 removed = []
124 removed = []
125 clean = []
125 clean = []
126 deleted, unknown, ignored = s.deleted, s.unknown, s.ignored
126 deleted, unknown, ignored = s.deleted, s.unknown, s.ignored
127 deletedset = set(deleted)
127 deletedset = set(deleted)
128 d = mf1.diff(mf2, match=match, clean=listclean)
128 d = mf1.diff(mf2, match=match, clean=listclean)
129 for fn, value in pycompat.iteritems(d):
129 for fn, value in pycompat.iteritems(d):
130 if fn in deletedset:
130 if fn in deletedset:
131 continue
131 continue
132 if value is None:
132 if value is None:
133 clean.append(fn)
133 clean.append(fn)
134 continue
134 continue
135 (node1, flag1), (node2, flag2) = value
135 (node1, flag1), (node2, flag2) = value
136 if node1 is None:
136 if node1 is None:
137 added.append(fn)
137 added.append(fn)
138 elif node2 is None:
138 elif node2 is None:
139 removed.append(fn)
139 removed.append(fn)
140 elif flag1 != flag2:
140 elif flag1 != flag2:
141 modified.append(fn)
141 modified.append(fn)
142 elif node2 not in wdirfilenodeids:
142 elif node2 not in wdirfilenodeids:
143 # When comparing files between two commits, we save time by
143 # When comparing files between two commits, we save time by
144 # not comparing the file contents when the nodeids differ.
144 # not comparing the file contents when the nodeids differ.
145 # Note that this means we incorrectly report a reverted change
145 # Note that this means we incorrectly report a reverted change
146 # to a file as a modification.
146 # to a file as a modification.
147 modified.append(fn)
147 modified.append(fn)
148 elif self[fn].cmp(other[fn]):
148 elif self[fn].cmp(other[fn]):
149 modified.append(fn)
149 modified.append(fn)
150 else:
150 else:
151 clean.append(fn)
151 clean.append(fn)
152
152
153 if removed:
153 if removed:
154 # need to filter files if they are already reported as removed
154 # need to filter files if they are already reported as removed
155 unknown = [
155 unknown = [
156 fn
156 fn
157 for fn in unknown
157 for fn in unknown
158 if fn not in mf1 and (not match or match(fn))
158 if fn not in mf1 and (not match or match(fn))
159 ]
159 ]
160 ignored = [
160 ignored = [
161 fn
161 fn
162 for fn in ignored
162 for fn in ignored
163 if fn not in mf1 and (not match or match(fn))
163 if fn not in mf1 and (not match or match(fn))
164 ]
164 ]
165 # if they're deleted, don't report them as removed
165 # if they're deleted, don't report them as removed
166 removed = [fn for fn in removed if fn not in deletedset]
166 removed = [fn for fn in removed if fn not in deletedset]
167
167
168 return scmutil.status(
168 return scmutil.status(
169 modified, added, removed, deleted, unknown, ignored, clean
169 modified, added, removed, deleted, unknown, ignored, clean
170 )
170 )
171
171
172 @propertycache
172 @propertycache
173 def substate(self):
173 def substate(self):
174 return subrepoutil.state(self, self._repo.ui)
174 return subrepoutil.state(self, self._repo.ui)
175
175
176 def subrev(self, subpath):
176 def subrev(self, subpath):
177 return self.substate[subpath][1]
177 return self.substate[subpath][1]
178
178
179 def rev(self):
179 def rev(self):
180 return self._rev
180 return self._rev
181
181
182 def node(self):
182 def node(self):
183 return self._node
183 return self._node
184
184
185 def hex(self):
185 def hex(self):
186 return hex(self.node())
186 return hex(self.node())
187
187
188 def manifest(self):
188 def manifest(self):
189 return self._manifest
189 return self._manifest
190
190
191 def manifestctx(self):
191 def manifestctx(self):
192 return self._manifestctx
192 return self._manifestctx
193
193
194 def repo(self):
194 def repo(self):
195 return self._repo
195 return self._repo
196
196
197 def phasestr(self):
197 def phasestr(self):
198 return phases.phasenames[self.phase()]
198 return phases.phasenames[self.phase()]
199
199
200 def mutable(self):
200 def mutable(self):
201 return self.phase() > phases.public
201 return self.phase() > phases.public
202
202
203 def matchfileset(self, expr, badfn=None):
203 def matchfileset(self, cwd, expr, badfn=None):
204 return fileset.match(self, expr, badfn=badfn)
204 return fileset.match(self, cwd, expr, badfn=badfn)
205
205
206 def obsolete(self):
206 def obsolete(self):
207 """True if the changeset is obsolete"""
207 """True if the changeset is obsolete"""
208 return self.rev() in obsmod.getrevs(self._repo, b'obsolete')
208 return self.rev() in obsmod.getrevs(self._repo, b'obsolete')
209
209
210 def extinct(self):
210 def extinct(self):
211 """True if the changeset is extinct"""
211 """True if the changeset is extinct"""
212 return self.rev() in obsmod.getrevs(self._repo, b'extinct')
212 return self.rev() in obsmod.getrevs(self._repo, b'extinct')
213
213
214 def orphan(self):
214 def orphan(self):
215 """True if the changeset is not obsolete, but its ancestor is"""
215 """True if the changeset is not obsolete, but its ancestor is"""
216 return self.rev() in obsmod.getrevs(self._repo, b'orphan')
216 return self.rev() in obsmod.getrevs(self._repo, b'orphan')
217
217
218 def phasedivergent(self):
218 def phasedivergent(self):
219 """True if the changeset tries to be a successor of a public changeset
219 """True if the changeset tries to be a successor of a public changeset
220
220
221 Only non-public and non-obsolete changesets may be phase-divergent.
221 Only non-public and non-obsolete changesets may be phase-divergent.
222 """
222 """
223 return self.rev() in obsmod.getrevs(self._repo, b'phasedivergent')
223 return self.rev() in obsmod.getrevs(self._repo, b'phasedivergent')
224
224
225 def contentdivergent(self):
225 def contentdivergent(self):
226 """Is a successor of a changeset with multiple possible successor sets
226 """Is a successor of a changeset with multiple possible successor sets
227
227
228 Only non-public and non-obsolete changesets may be content-divergent.
228 Only non-public and non-obsolete changesets may be content-divergent.
229 """
229 """
230 return self.rev() in obsmod.getrevs(self._repo, b'contentdivergent')
230 return self.rev() in obsmod.getrevs(self._repo, b'contentdivergent')
231
231
232 def isunstable(self):
232 def isunstable(self):
233 """True if the changeset is either orphan, phase-divergent or
233 """True if the changeset is either orphan, phase-divergent or
234 content-divergent"""
234 content-divergent"""
235 return self.orphan() or self.phasedivergent() or self.contentdivergent()
235 return self.orphan() or self.phasedivergent() or self.contentdivergent()
236
236
237 def instabilities(self):
237 def instabilities(self):
238 """return the list of instabilities affecting this changeset.
238 """return the list of instabilities affecting this changeset.
239
239
240 Instabilities are returned as strings. possible values are:
240 Instabilities are returned as strings. possible values are:
241 - orphan,
241 - orphan,
242 - phase-divergent,
242 - phase-divergent,
243 - content-divergent.
243 - content-divergent.
244 """
244 """
245 instabilities = []
245 instabilities = []
246 if self.orphan():
246 if self.orphan():
247 instabilities.append(b'orphan')
247 instabilities.append(b'orphan')
248 if self.phasedivergent():
248 if self.phasedivergent():
249 instabilities.append(b'phase-divergent')
249 instabilities.append(b'phase-divergent')
250 if self.contentdivergent():
250 if self.contentdivergent():
251 instabilities.append(b'content-divergent')
251 instabilities.append(b'content-divergent')
252 return instabilities
252 return instabilities
253
253
254 def parents(self):
254 def parents(self):
255 """return contexts for each parent changeset"""
255 """return contexts for each parent changeset"""
256 return self._parents
256 return self._parents
257
257
258 def p1(self):
258 def p1(self):
259 return self._parents[0]
259 return self._parents[0]
260
260
261 def p2(self):
261 def p2(self):
262 parents = self._parents
262 parents = self._parents
263 if len(parents) == 2:
263 if len(parents) == 2:
264 return parents[1]
264 return parents[1]
265 return self._repo[nullrev]
265 return self._repo[nullrev]
266
266
267 def _fileinfo(self, path):
267 def _fileinfo(self, path):
268 if '_manifest' in self.__dict__:
268 if '_manifest' in self.__dict__:
269 try:
269 try:
270 return self._manifest[path], self._manifest.flags(path)
270 return self._manifest[path], self._manifest.flags(path)
271 except KeyError:
271 except KeyError:
272 raise error.ManifestLookupError(
272 raise error.ManifestLookupError(
273 self._node, path, _(b'not found in manifest')
273 self._node, path, _(b'not found in manifest')
274 )
274 )
275 if '_manifestdelta' in self.__dict__ or path in self.files():
275 if '_manifestdelta' in self.__dict__ or path in self.files():
276 if path in self._manifestdelta:
276 if path in self._manifestdelta:
277 return (
277 return (
278 self._manifestdelta[path],
278 self._manifestdelta[path],
279 self._manifestdelta.flags(path),
279 self._manifestdelta.flags(path),
280 )
280 )
281 mfl = self._repo.manifestlog
281 mfl = self._repo.manifestlog
282 try:
282 try:
283 node, flag = mfl[self._changeset.manifest].find(path)
283 node, flag = mfl[self._changeset.manifest].find(path)
284 except KeyError:
284 except KeyError:
285 raise error.ManifestLookupError(
285 raise error.ManifestLookupError(
286 self._node, path, _(b'not found in manifest')
286 self._node, path, _(b'not found in manifest')
287 )
287 )
288
288
289 return node, flag
289 return node, flag
290
290
291 def filenode(self, path):
291 def filenode(self, path):
292 return self._fileinfo(path)[0]
292 return self._fileinfo(path)[0]
293
293
294 def flags(self, path):
294 def flags(self, path):
295 try:
295 try:
296 return self._fileinfo(path)[1]
296 return self._fileinfo(path)[1]
297 except error.LookupError:
297 except error.LookupError:
298 return b''
298 return b''
299
299
300 @propertycache
300 @propertycache
301 def _copies(self):
301 def _copies(self):
302 return copies.computechangesetcopies(self)
302 return copies.computechangesetcopies(self)
303
303
304 def p1copies(self):
304 def p1copies(self):
305 return self._copies[0]
305 return self._copies[0]
306
306
307 def p2copies(self):
307 def p2copies(self):
308 return self._copies[1]
308 return self._copies[1]
309
309
310 def sub(self, path, allowcreate=True):
310 def sub(self, path, allowcreate=True):
311 '''return a subrepo for the stored revision of path, never wdir()'''
311 '''return a subrepo for the stored revision of path, never wdir()'''
312 return subrepo.subrepo(self, path, allowcreate=allowcreate)
312 return subrepo.subrepo(self, path, allowcreate=allowcreate)
313
313
314 def nullsub(self, path, pctx):
314 def nullsub(self, path, pctx):
315 return subrepo.nullsubrepo(self, path, pctx)
315 return subrepo.nullsubrepo(self, path, pctx)
316
316
317 def workingsub(self, path):
317 def workingsub(self, path):
318 '''return a subrepo for the stored revision, or wdir if this is a wdir
318 '''return a subrepo for the stored revision, or wdir if this is a wdir
319 context.
319 context.
320 '''
320 '''
321 return subrepo.subrepo(self, path, allowwdir=True)
321 return subrepo.subrepo(self, path, allowwdir=True)
322
322
323 def match(
323 def match(
324 self,
324 self,
325 pats=None,
325 pats=None,
326 include=None,
326 include=None,
327 exclude=None,
327 exclude=None,
328 default=b'glob',
328 default=b'glob',
329 listsubrepos=False,
329 listsubrepos=False,
330 badfn=None,
330 badfn=None,
331 cwd=None,
331 ):
332 ):
332 r = self._repo
333 r = self._repo
334 if not cwd:
335 cwd = r.getcwd()
333 return matchmod.match(
336 return matchmod.match(
334 r.root,
337 r.root,
335 r.getcwd(),
338 cwd,
336 pats,
339 pats,
337 include,
340 include,
338 exclude,
341 exclude,
339 default,
342 default,
340 auditor=r.nofsauditor,
343 auditor=r.nofsauditor,
341 ctx=self,
344 ctx=self,
342 listsubrepos=listsubrepos,
345 listsubrepos=listsubrepos,
343 badfn=badfn,
346 badfn=badfn,
344 )
347 )
345
348
346 def diff(
349 def diff(
347 self,
350 self,
348 ctx2=None,
351 ctx2=None,
349 match=None,
352 match=None,
350 changes=None,
353 changes=None,
351 opts=None,
354 opts=None,
352 losedatafn=None,
355 losedatafn=None,
353 pathfn=None,
356 pathfn=None,
354 copy=None,
357 copy=None,
355 copysourcematch=None,
358 copysourcematch=None,
356 hunksfilterfn=None,
359 hunksfilterfn=None,
357 ):
360 ):
358 """Returns a diff generator for the given contexts and matcher"""
361 """Returns a diff generator for the given contexts and matcher"""
359 if ctx2 is None:
362 if ctx2 is None:
360 ctx2 = self.p1()
363 ctx2 = self.p1()
361 if ctx2 is not None:
364 if ctx2 is not None:
362 ctx2 = self._repo[ctx2]
365 ctx2 = self._repo[ctx2]
363 return patch.diff(
366 return patch.diff(
364 self._repo,
367 self._repo,
365 ctx2,
368 ctx2,
366 self,
369 self,
367 match=match,
370 match=match,
368 changes=changes,
371 changes=changes,
369 opts=opts,
372 opts=opts,
370 losedatafn=losedatafn,
373 losedatafn=losedatafn,
371 pathfn=pathfn,
374 pathfn=pathfn,
372 copy=copy,
375 copy=copy,
373 copysourcematch=copysourcematch,
376 copysourcematch=copysourcematch,
374 hunksfilterfn=hunksfilterfn,
377 hunksfilterfn=hunksfilterfn,
375 )
378 )
376
379
377 def dirs(self):
380 def dirs(self):
378 return self._manifest.dirs()
381 return self._manifest.dirs()
379
382
380 def hasdir(self, dir):
383 def hasdir(self, dir):
381 return self._manifest.hasdir(dir)
384 return self._manifest.hasdir(dir)
382
385
383 def status(
386 def status(
384 self,
387 self,
385 other=None,
388 other=None,
386 match=None,
389 match=None,
387 listignored=False,
390 listignored=False,
388 listclean=False,
391 listclean=False,
389 listunknown=False,
392 listunknown=False,
390 listsubrepos=False,
393 listsubrepos=False,
391 ):
394 ):
392 """return status of files between two nodes or node and working
395 """return status of files between two nodes or node and working
393 directory.
396 directory.
394
397
395 If other is None, compare this node with working directory.
398 If other is None, compare this node with working directory.
396
399
397 returns (modified, added, removed, deleted, unknown, ignored, clean)
400 returns (modified, added, removed, deleted, unknown, ignored, clean)
398 """
401 """
399
402
400 ctx1 = self
403 ctx1 = self
401 ctx2 = self._repo[other]
404 ctx2 = self._repo[other]
402
405
403 # This next code block is, admittedly, fragile logic that tests for
406 # This next code block is, admittedly, fragile logic that tests for
404 # reversing the contexts and wouldn't need to exist if it weren't for
407 # reversing the contexts and wouldn't need to exist if it weren't for
405 # the fast (and common) code path of comparing the working directory
408 # the fast (and common) code path of comparing the working directory
406 # with its first parent.
409 # with its first parent.
407 #
410 #
408 # What we're aiming for here is the ability to call:
411 # What we're aiming for here is the ability to call:
409 #
412 #
410 # workingctx.status(parentctx)
413 # workingctx.status(parentctx)
411 #
414 #
412 # If we always built the manifest for each context and compared those,
415 # If we always built the manifest for each context and compared those,
413 # then we'd be done. But the special case of the above call means we
416 # then we'd be done. But the special case of the above call means we
414 # just copy the manifest of the parent.
417 # just copy the manifest of the parent.
415 reversed = False
418 reversed = False
416 if not isinstance(ctx1, changectx) and isinstance(ctx2, changectx):
419 if not isinstance(ctx1, changectx) and isinstance(ctx2, changectx):
417 reversed = True
420 reversed = True
418 ctx1, ctx2 = ctx2, ctx1
421 ctx1, ctx2 = ctx2, ctx1
419
422
420 match = self._repo.narrowmatch(match)
423 match = self._repo.narrowmatch(match)
421 match = ctx2._matchstatus(ctx1, match)
424 match = ctx2._matchstatus(ctx1, match)
422 r = scmutil.status([], [], [], [], [], [], [])
425 r = scmutil.status([], [], [], [], [], [], [])
423 r = ctx2._buildstatus(
426 r = ctx2._buildstatus(
424 ctx1, r, match, listignored, listclean, listunknown
427 ctx1, r, match, listignored, listclean, listunknown
425 )
428 )
426
429
427 if reversed:
430 if reversed:
428 # Reverse added and removed. Clear deleted, unknown and ignored as
431 # Reverse added and removed. Clear deleted, unknown and ignored as
429 # these make no sense to reverse.
432 # these make no sense to reverse.
430 r = scmutil.status(
433 r = scmutil.status(
431 r.modified, r.removed, r.added, [], [], [], r.clean
434 r.modified, r.removed, r.added, [], [], [], r.clean
432 )
435 )
433
436
434 if listsubrepos:
437 if listsubrepos:
435 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
438 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
436 try:
439 try:
437 rev2 = ctx2.subrev(subpath)
440 rev2 = ctx2.subrev(subpath)
438 except KeyError:
441 except KeyError:
439 # A subrepo that existed in node1 was deleted between
442 # A subrepo that existed in node1 was deleted between
440 # node1 and node2 (inclusive). Thus, ctx2's substate
443 # node1 and node2 (inclusive). Thus, ctx2's substate
441 # won't contain that subpath. The best we can do ignore it.
444 # won't contain that subpath. The best we can do ignore it.
442 rev2 = None
445 rev2 = None
443 submatch = matchmod.subdirmatcher(subpath, match)
446 submatch = matchmod.subdirmatcher(subpath, match)
444 s = sub.status(
447 s = sub.status(
445 rev2,
448 rev2,
446 match=submatch,
449 match=submatch,
447 ignored=listignored,
450 ignored=listignored,
448 clean=listclean,
451 clean=listclean,
449 unknown=listunknown,
452 unknown=listunknown,
450 listsubrepos=True,
453 listsubrepos=True,
451 )
454 )
452 for k in (
455 for k in (
453 'modified',
456 'modified',
454 'added',
457 'added',
455 'removed',
458 'removed',
456 'deleted',
459 'deleted',
457 'unknown',
460 'unknown',
458 'ignored',
461 'ignored',
459 'clean',
462 'clean',
460 ):
463 ):
461 rfiles, sfiles = getattr(r, k), getattr(s, k)
464 rfiles, sfiles = getattr(r, k), getattr(s, k)
462 rfiles.extend(b"%s/%s" % (subpath, f) for f in sfiles)
465 rfiles.extend(b"%s/%s" % (subpath, f) for f in sfiles)
463
466
464 r.modified.sort()
467 r.modified.sort()
465 r.added.sort()
468 r.added.sort()
466 r.removed.sort()
469 r.removed.sort()
467 r.deleted.sort()
470 r.deleted.sort()
468 r.unknown.sort()
471 r.unknown.sort()
469 r.ignored.sort()
472 r.ignored.sort()
470 r.clean.sort()
473 r.clean.sort()
471
474
472 return r
475 return r
473
476
474
477
475 class changectx(basectx):
478 class changectx(basectx):
476 """A changecontext object makes access to data related to a particular
479 """A changecontext object makes access to data related to a particular
477 changeset convenient. It represents a read-only context already present in
480 changeset convenient. It represents a read-only context already present in
478 the repo."""
481 the repo."""
479
482
480 def __init__(self, repo, rev, node, maybe_filtered=True):
483 def __init__(self, repo, rev, node, maybe_filtered=True):
481 super(changectx, self).__init__(repo)
484 super(changectx, self).__init__(repo)
482 self._rev = rev
485 self._rev = rev
483 self._node = node
486 self._node = node
484 # When maybe_filtered is True, the revision might be affected by
487 # When maybe_filtered is True, the revision might be affected by
485 # changelog filtering and operation through the filtered changelog must be used.
488 # changelog filtering and operation through the filtered changelog must be used.
486 #
489 #
487 # When maybe_filtered is False, the revision has already been checked
490 # When maybe_filtered is False, the revision has already been checked
488 # against filtering and is not filtered. Operation through the
491 # against filtering and is not filtered. Operation through the
489 # unfiltered changelog might be used in some case.
492 # unfiltered changelog might be used in some case.
490 self._maybe_filtered = maybe_filtered
493 self._maybe_filtered = maybe_filtered
491
494
492 def __hash__(self):
495 def __hash__(self):
493 try:
496 try:
494 return hash(self._rev)
497 return hash(self._rev)
495 except AttributeError:
498 except AttributeError:
496 return id(self)
499 return id(self)
497
500
498 def __nonzero__(self):
501 def __nonzero__(self):
499 return self._rev != nullrev
502 return self._rev != nullrev
500
503
501 __bool__ = __nonzero__
504 __bool__ = __nonzero__
502
505
503 @propertycache
506 @propertycache
504 def _changeset(self):
507 def _changeset(self):
505 if self._maybe_filtered:
508 if self._maybe_filtered:
506 repo = self._repo
509 repo = self._repo
507 else:
510 else:
508 repo = self._repo.unfiltered()
511 repo = self._repo.unfiltered()
509 return repo.changelog.changelogrevision(self.rev())
512 return repo.changelog.changelogrevision(self.rev())
510
513
511 @propertycache
514 @propertycache
512 def _manifest(self):
515 def _manifest(self):
513 return self._manifestctx.read()
516 return self._manifestctx.read()
514
517
515 @property
518 @property
516 def _manifestctx(self):
519 def _manifestctx(self):
517 return self._repo.manifestlog[self._changeset.manifest]
520 return self._repo.manifestlog[self._changeset.manifest]
518
521
519 @propertycache
522 @propertycache
520 def _manifestdelta(self):
523 def _manifestdelta(self):
521 return self._manifestctx.readdelta()
524 return self._manifestctx.readdelta()
522
525
523 @propertycache
526 @propertycache
524 def _parents(self):
527 def _parents(self):
525 repo = self._repo
528 repo = self._repo
526 if self._maybe_filtered:
529 if self._maybe_filtered:
527 cl = repo.changelog
530 cl = repo.changelog
528 else:
531 else:
529 cl = repo.unfiltered().changelog
532 cl = repo.unfiltered().changelog
530
533
531 p1, p2 = cl.parentrevs(self._rev)
534 p1, p2 = cl.parentrevs(self._rev)
532 if p2 == nullrev:
535 if p2 == nullrev:
533 return [repo[p1]]
536 return [repo[p1]]
534 return [repo[p1], repo[p2]]
537 return [repo[p1], repo[p2]]
535
538
536 def changeset(self):
539 def changeset(self):
537 c = self._changeset
540 c = self._changeset
538 return (
541 return (
539 c.manifest,
542 c.manifest,
540 c.user,
543 c.user,
541 c.date,
544 c.date,
542 c.files,
545 c.files,
543 c.description,
546 c.description,
544 c.extra,
547 c.extra,
545 )
548 )
546
549
547 def manifestnode(self):
550 def manifestnode(self):
548 return self._changeset.manifest
551 return self._changeset.manifest
549
552
550 def user(self):
553 def user(self):
551 return self._changeset.user
554 return self._changeset.user
552
555
553 def date(self):
556 def date(self):
554 return self._changeset.date
557 return self._changeset.date
555
558
556 def files(self):
559 def files(self):
557 return self._changeset.files
560 return self._changeset.files
558
561
559 def filesmodified(self):
562 def filesmodified(self):
560 modified = set(self.files())
563 modified = set(self.files())
561 modified.difference_update(self.filesadded())
564 modified.difference_update(self.filesadded())
562 modified.difference_update(self.filesremoved())
565 modified.difference_update(self.filesremoved())
563 return sorted(modified)
566 return sorted(modified)
564
567
565 def filesadded(self):
568 def filesadded(self):
566 filesadded = self._changeset.filesadded
569 filesadded = self._changeset.filesadded
567 compute_on_none = True
570 compute_on_none = True
568 if self._repo.filecopiesmode == b'changeset-sidedata':
571 if self._repo.filecopiesmode == b'changeset-sidedata':
569 compute_on_none = False
572 compute_on_none = False
570 else:
573 else:
571 source = self._repo.ui.config(b'experimental', b'copies.read-from')
574 source = self._repo.ui.config(b'experimental', b'copies.read-from')
572 if source == b'changeset-only':
575 if source == b'changeset-only':
573 compute_on_none = False
576 compute_on_none = False
574 elif source != b'compatibility':
577 elif source != b'compatibility':
575 # filelog mode, ignore any changelog content
578 # filelog mode, ignore any changelog content
576 filesadded = None
579 filesadded = None
577 if filesadded is None:
580 if filesadded is None:
578 if compute_on_none:
581 if compute_on_none:
579 filesadded = copies.computechangesetfilesadded(self)
582 filesadded = copies.computechangesetfilesadded(self)
580 else:
583 else:
581 filesadded = []
584 filesadded = []
582 return filesadded
585 return filesadded
583
586
584 def filesremoved(self):
587 def filesremoved(self):
585 filesremoved = self._changeset.filesremoved
588 filesremoved = self._changeset.filesremoved
586 compute_on_none = True
589 compute_on_none = True
587 if self._repo.filecopiesmode == b'changeset-sidedata':
590 if self._repo.filecopiesmode == b'changeset-sidedata':
588 compute_on_none = False
591 compute_on_none = False
589 else:
592 else:
590 source = self._repo.ui.config(b'experimental', b'copies.read-from')
593 source = self._repo.ui.config(b'experimental', b'copies.read-from')
591 if source == b'changeset-only':
594 if source == b'changeset-only':
592 compute_on_none = False
595 compute_on_none = False
593 elif source != b'compatibility':
596 elif source != b'compatibility':
594 # filelog mode, ignore any changelog content
597 # filelog mode, ignore any changelog content
595 filesremoved = None
598 filesremoved = None
596 if filesremoved is None:
599 if filesremoved is None:
597 if compute_on_none:
600 if compute_on_none:
598 filesremoved = copies.computechangesetfilesremoved(self)
601 filesremoved = copies.computechangesetfilesremoved(self)
599 else:
602 else:
600 filesremoved = []
603 filesremoved = []
601 return filesremoved
604 return filesremoved
602
605
603 @propertycache
606 @propertycache
604 def _copies(self):
607 def _copies(self):
605 p1copies = self._changeset.p1copies
608 p1copies = self._changeset.p1copies
606 p2copies = self._changeset.p2copies
609 p2copies = self._changeset.p2copies
607 compute_on_none = True
610 compute_on_none = True
608 if self._repo.filecopiesmode == b'changeset-sidedata':
611 if self._repo.filecopiesmode == b'changeset-sidedata':
609 compute_on_none = False
612 compute_on_none = False
610 else:
613 else:
611 source = self._repo.ui.config(b'experimental', b'copies.read-from')
614 source = self._repo.ui.config(b'experimental', b'copies.read-from')
612 # If config says to get copy metadata only from changeset, then
615 # If config says to get copy metadata only from changeset, then
613 # return that, defaulting to {} if there was no copy metadata. In
616 # return that, defaulting to {} if there was no copy metadata. In
614 # compatibility mode, we return copy data from the changeset if it
617 # compatibility mode, we return copy data from the changeset if it
615 # was recorded there, and otherwise we fall back to getting it from
618 # was recorded there, and otherwise we fall back to getting it from
616 # the filelogs (below).
619 # the filelogs (below).
617 #
620 #
618 # If we are in compatiblity mode and there is not data in the
621 # If we are in compatiblity mode and there is not data in the
619 # changeset), we get the copy metadata from the filelogs.
622 # changeset), we get the copy metadata from the filelogs.
620 #
623 #
621 # otherwise, when config said to read only from filelog, we get the
624 # otherwise, when config said to read only from filelog, we get the
622 # copy metadata from the filelogs.
625 # copy metadata from the filelogs.
623 if source == b'changeset-only':
626 if source == b'changeset-only':
624 compute_on_none = False
627 compute_on_none = False
625 elif source != b'compatibility':
628 elif source != b'compatibility':
626 # filelog mode, ignore any changelog content
629 # filelog mode, ignore any changelog content
627 p1copies = p2copies = None
630 p1copies = p2copies = None
628 if p1copies is None:
631 if p1copies is None:
629 if compute_on_none:
632 if compute_on_none:
630 p1copies, p2copies = super(changectx, self)._copies
633 p1copies, p2copies = super(changectx, self)._copies
631 else:
634 else:
632 if p1copies is None:
635 if p1copies is None:
633 p1copies = {}
636 p1copies = {}
634 if p2copies is None:
637 if p2copies is None:
635 p2copies = {}
638 p2copies = {}
636 return p1copies, p2copies
639 return p1copies, p2copies
637
640
638 def description(self):
641 def description(self):
639 return self._changeset.description
642 return self._changeset.description
640
643
641 def branch(self):
644 def branch(self):
642 return encoding.tolocal(self._changeset.extra.get(b"branch"))
645 return encoding.tolocal(self._changeset.extra.get(b"branch"))
643
646
644 def closesbranch(self):
647 def closesbranch(self):
645 return b'close' in self._changeset.extra
648 return b'close' in self._changeset.extra
646
649
647 def extra(self):
650 def extra(self):
648 """Return a dict of extra information."""
651 """Return a dict of extra information."""
649 return self._changeset.extra
652 return self._changeset.extra
650
653
651 def tags(self):
654 def tags(self):
652 """Return a list of byte tag names"""
655 """Return a list of byte tag names"""
653 return self._repo.nodetags(self._node)
656 return self._repo.nodetags(self._node)
654
657
655 def bookmarks(self):
658 def bookmarks(self):
656 """Return a list of byte bookmark names."""
659 """Return a list of byte bookmark names."""
657 return self._repo.nodebookmarks(self._node)
660 return self._repo.nodebookmarks(self._node)
658
661
659 def phase(self):
662 def phase(self):
660 return self._repo._phasecache.phase(self._repo, self._rev)
663 return self._repo._phasecache.phase(self._repo, self._rev)
661
664
662 def hidden(self):
665 def hidden(self):
663 return self._rev in repoview.filterrevs(self._repo, b'visible')
666 return self._rev in repoview.filterrevs(self._repo, b'visible')
664
667
665 def isinmemory(self):
668 def isinmemory(self):
666 return False
669 return False
667
670
668 def children(self):
671 def children(self):
669 """return list of changectx contexts for each child changeset.
672 """return list of changectx contexts for each child changeset.
670
673
671 This returns only the immediate child changesets. Use descendants() to
674 This returns only the immediate child changesets. Use descendants() to
672 recursively walk children.
675 recursively walk children.
673 """
676 """
674 c = self._repo.changelog.children(self._node)
677 c = self._repo.changelog.children(self._node)
675 return [self._repo[x] for x in c]
678 return [self._repo[x] for x in c]
676
679
677 def ancestors(self):
680 def ancestors(self):
678 for a in self._repo.changelog.ancestors([self._rev]):
681 for a in self._repo.changelog.ancestors([self._rev]):
679 yield self._repo[a]
682 yield self._repo[a]
680
683
681 def descendants(self):
684 def descendants(self):
682 """Recursively yield all children of the changeset.
685 """Recursively yield all children of the changeset.
683
686
684 For just the immediate children, use children()
687 For just the immediate children, use children()
685 """
688 """
686 for d in self._repo.changelog.descendants([self._rev]):
689 for d in self._repo.changelog.descendants([self._rev]):
687 yield self._repo[d]
690 yield self._repo[d]
688
691
689 def filectx(self, path, fileid=None, filelog=None):
692 def filectx(self, path, fileid=None, filelog=None):
690 """get a file context from this changeset"""
693 """get a file context from this changeset"""
691 if fileid is None:
694 if fileid is None:
692 fileid = self.filenode(path)
695 fileid = self.filenode(path)
693 return filectx(
696 return filectx(
694 self._repo, path, fileid=fileid, changectx=self, filelog=filelog
697 self._repo, path, fileid=fileid, changectx=self, filelog=filelog
695 )
698 )
696
699
697 def ancestor(self, c2, warn=False):
700 def ancestor(self, c2, warn=False):
698 """return the "best" ancestor context of self and c2
701 """return the "best" ancestor context of self and c2
699
702
700 If there are multiple candidates, it will show a message and check
703 If there are multiple candidates, it will show a message and check
701 merge.preferancestor configuration before falling back to the
704 merge.preferancestor configuration before falling back to the
702 revlog ancestor."""
705 revlog ancestor."""
703 # deal with workingctxs
706 # deal with workingctxs
704 n2 = c2._node
707 n2 = c2._node
705 if n2 is None:
708 if n2 is None:
706 n2 = c2._parents[0]._node
709 n2 = c2._parents[0]._node
707 cahs = self._repo.changelog.commonancestorsheads(self._node, n2)
710 cahs = self._repo.changelog.commonancestorsheads(self._node, n2)
708 if not cahs:
711 if not cahs:
709 anc = nullid
712 anc = nullid
710 elif len(cahs) == 1:
713 elif len(cahs) == 1:
711 anc = cahs[0]
714 anc = cahs[0]
712 else:
715 else:
713 # experimental config: merge.preferancestor
716 # experimental config: merge.preferancestor
714 for r in self._repo.ui.configlist(b'merge', b'preferancestor'):
717 for r in self._repo.ui.configlist(b'merge', b'preferancestor'):
715 try:
718 try:
716 ctx = scmutil.revsymbol(self._repo, r)
719 ctx = scmutil.revsymbol(self._repo, r)
717 except error.RepoLookupError:
720 except error.RepoLookupError:
718 continue
721 continue
719 anc = ctx.node()
722 anc = ctx.node()
720 if anc in cahs:
723 if anc in cahs:
721 break
724 break
722 else:
725 else:
723 anc = self._repo.changelog.ancestor(self._node, n2)
726 anc = self._repo.changelog.ancestor(self._node, n2)
724 if warn:
727 if warn:
725 self._repo.ui.status(
728 self._repo.ui.status(
726 (
729 (
727 _(b"note: using %s as ancestor of %s and %s\n")
730 _(b"note: using %s as ancestor of %s and %s\n")
728 % (short(anc), short(self._node), short(n2))
731 % (short(anc), short(self._node), short(n2))
729 )
732 )
730 + b''.join(
733 + b''.join(
731 _(
734 _(
732 b" alternatively, use --config "
735 b" alternatively, use --config "
733 b"merge.preferancestor=%s\n"
736 b"merge.preferancestor=%s\n"
734 )
737 )
735 % short(n)
738 % short(n)
736 for n in sorted(cahs)
739 for n in sorted(cahs)
737 if n != anc
740 if n != anc
738 )
741 )
739 )
742 )
740 return self._repo[anc]
743 return self._repo[anc]
741
744
742 def isancestorof(self, other):
745 def isancestorof(self, other):
743 """True if this changeset is an ancestor of other"""
746 """True if this changeset is an ancestor of other"""
744 return self._repo.changelog.isancestorrev(self._rev, other._rev)
747 return self._repo.changelog.isancestorrev(self._rev, other._rev)
745
748
746 def walk(self, match):
749 def walk(self, match):
747 '''Generates matching file names.'''
750 '''Generates matching file names.'''
748
751
749 # Wrap match.bad method to have message with nodeid
752 # Wrap match.bad method to have message with nodeid
750 def bad(fn, msg):
753 def bad(fn, msg):
751 # The manifest doesn't know about subrepos, so don't complain about
754 # The manifest doesn't know about subrepos, so don't complain about
752 # paths into valid subrepos.
755 # paths into valid subrepos.
753 if any(fn == s or fn.startswith(s + b'/') for s in self.substate):
756 if any(fn == s or fn.startswith(s + b'/') for s in self.substate):
754 return
757 return
755 match.bad(fn, _(b'no such file in rev %s') % self)
758 match.bad(fn, _(b'no such file in rev %s') % self)
756
759
757 m = matchmod.badmatch(self._repo.narrowmatch(match), bad)
760 m = matchmod.badmatch(self._repo.narrowmatch(match), bad)
758 return self._manifest.walk(m)
761 return self._manifest.walk(m)
759
762
760 def matches(self, match):
763 def matches(self, match):
761 return self.walk(match)
764 return self.walk(match)
762
765
763
766
764 class basefilectx(object):
767 class basefilectx(object):
765 """A filecontext object represents the common logic for its children:
768 """A filecontext object represents the common logic for its children:
766 filectx: read-only access to a filerevision that is already present
769 filectx: read-only access to a filerevision that is already present
767 in the repo,
770 in the repo,
768 workingfilectx: a filecontext that represents files from the working
771 workingfilectx: a filecontext that represents files from the working
769 directory,
772 directory,
770 memfilectx: a filecontext that represents files in-memory,
773 memfilectx: a filecontext that represents files in-memory,
771 """
774 """
772
775
773 @propertycache
776 @propertycache
774 def _filelog(self):
777 def _filelog(self):
775 return self._repo.file(self._path)
778 return self._repo.file(self._path)
776
779
777 @propertycache
780 @propertycache
778 def _changeid(self):
781 def _changeid(self):
779 if '_changectx' in self.__dict__:
782 if '_changectx' in self.__dict__:
780 return self._changectx.rev()
783 return self._changectx.rev()
781 elif '_descendantrev' in self.__dict__:
784 elif '_descendantrev' in self.__dict__:
782 # this file context was created from a revision with a known
785 # this file context was created from a revision with a known
783 # descendant, we can (lazily) correct for linkrev aliases
786 # descendant, we can (lazily) correct for linkrev aliases
784 return self._adjustlinkrev(self._descendantrev)
787 return self._adjustlinkrev(self._descendantrev)
785 else:
788 else:
786 return self._filelog.linkrev(self._filerev)
789 return self._filelog.linkrev(self._filerev)
787
790
788 @propertycache
791 @propertycache
789 def _filenode(self):
792 def _filenode(self):
790 if '_fileid' in self.__dict__:
793 if '_fileid' in self.__dict__:
791 return self._filelog.lookup(self._fileid)
794 return self._filelog.lookup(self._fileid)
792 else:
795 else:
793 return self._changectx.filenode(self._path)
796 return self._changectx.filenode(self._path)
794
797
795 @propertycache
798 @propertycache
796 def _filerev(self):
799 def _filerev(self):
797 return self._filelog.rev(self._filenode)
800 return self._filelog.rev(self._filenode)
798
801
799 @propertycache
802 @propertycache
800 def _repopath(self):
803 def _repopath(self):
801 return self._path
804 return self._path
802
805
803 def __nonzero__(self):
806 def __nonzero__(self):
804 try:
807 try:
805 self._filenode
808 self._filenode
806 return True
809 return True
807 except error.LookupError:
810 except error.LookupError:
808 # file is missing
811 # file is missing
809 return False
812 return False
810
813
811 __bool__ = __nonzero__
814 __bool__ = __nonzero__
812
815
813 def __bytes__(self):
816 def __bytes__(self):
814 try:
817 try:
815 return b"%s@%s" % (self.path(), self._changectx)
818 return b"%s@%s" % (self.path(), self._changectx)
816 except error.LookupError:
819 except error.LookupError:
817 return b"%s@???" % self.path()
820 return b"%s@???" % self.path()
818
821
819 __str__ = encoding.strmethod(__bytes__)
822 __str__ = encoding.strmethod(__bytes__)
820
823
821 def __repr__(self):
824 def __repr__(self):
822 return "<%s %s>" % (type(self).__name__, str(self))
825 return "<%s %s>" % (type(self).__name__, str(self))
823
826
824 def __hash__(self):
827 def __hash__(self):
825 try:
828 try:
826 return hash((self._path, self._filenode))
829 return hash((self._path, self._filenode))
827 except AttributeError:
830 except AttributeError:
828 return id(self)
831 return id(self)
829
832
830 def __eq__(self, other):
833 def __eq__(self, other):
831 try:
834 try:
832 return (
835 return (
833 type(self) == type(other)
836 type(self) == type(other)
834 and self._path == other._path
837 and self._path == other._path
835 and self._filenode == other._filenode
838 and self._filenode == other._filenode
836 )
839 )
837 except AttributeError:
840 except AttributeError:
838 return False
841 return False
839
842
840 def __ne__(self, other):
843 def __ne__(self, other):
841 return not (self == other)
844 return not (self == other)
842
845
843 def filerev(self):
    def filerev(self):
        return self._filerev

    def filenode(self):
        return self._filenode

    @propertycache
    def _flags(self):
        return self._changectx.flags(self._path)

    def flags(self):
        return self._flags

    def filelog(self):
        return self._filelog

    def rev(self):
        return self._changeid

    def linkrev(self):
        return self._filelog.linkrev(self._filerev)

    def node(self):
        return self._changectx.node()

    def hex(self):
        return self._changectx.hex()

    def user(self):
        return self._changectx.user()

    def date(self):
        return self._changectx.date()

    def files(self):
        return self._changectx.files()

    def description(self):
        return self._changectx.description()

    def branch(self):
        return self._changectx.branch()

    def extra(self):
        return self._changectx.extra()

    def phase(self):
        return self._changectx.phase()

    def phasestr(self):
        return self._changectx.phasestr()

    def obsolete(self):
        return self._changectx.obsolete()

    def instabilities(self):
        return self._changectx.instabilities()

    def manifest(self):
        return self._changectx.manifest()

    def changectx(self):
        return self._changectx

    def renamed(self):
        return self._copied

    def copysource(self):
        return self._copied and self._copied[0]

    def repo(self):
        return self._repo

    def size(self):
        return len(self.data())

    def path(self):
        return self._path

    def isbinary(self):
        try:
            return stringutil.binary(self.data())
        except IOError:
            return False

    def isexec(self):
        return b'x' in self.flags()

    def islink(self):
        return b'l' in self.flags()

    def isabsent(self):
        """whether this filectx represents a file not in self._changectx

        This is mainly for merge code to detect change/delete conflicts. This is
        expected to be True for all subclasses of basectx."""
        return False

    _customcmp = False

    def cmp(self, fctx):
        """compare with other file context

        returns True if different than fctx.
        """
        if fctx._customcmp:
            return fctx.cmp(self)

        if self._filenode is None:
            raise error.ProgrammingError(
                b'filectx.cmp() must be reimplemented if not backed by revlog'
            )

        if fctx._filenode is None:
            if self._repo._encodefilterpats:
                # can't rely on size() because wdir content may be decoded
                return self._filelog.cmp(self._filenode, fctx.data())
            if self.size() - 4 == fctx.size():
                # size() can match:
                # if file data starts with '\1\n', empty metadata block is
                # prepended, which adds 4 bytes to filelog.size().
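                # (Illustration, assuming standard revlog metadata framing:
                # such data is stored as b'\x01\n' + b'\x01\n' + data, so
                # filelog.size() reports 4 bytes more than the working-copy
                # size; an exact difference of 4 therefore still needs a
                # real content comparison rather than a mismatch verdict.)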
                return self._filelog.cmp(self._filenode, fctx.data())
            if self.size() == fctx.size():
                # size() matches: need to compare content
                return self._filelog.cmp(self._filenode, fctx.data())

        # size() differs
        return True

    def _adjustlinkrev(self, srcrev, inclusive=False, stoprev=None):
        """return the first ancestor of <srcrev> introducing <fnode>

        If the linkrev of the file revision does not point to an ancestor of
        srcrev, we'll walk down the ancestors until we find one introducing
        this file revision.

        :srcrev: the changeset revision we search ancestors from
        :inclusive: if true, the src revision will also be checked
        :stoprev: an optional revision to stop the walk at. If no introduction
                  of this file content could be found before this floor
                  revision, the function will return "None" and stop its
                  iteration.
        """
        repo = self._repo
        cl = repo.unfiltered().changelog
        mfl = repo.manifestlog
        # fetch the linkrev
        lkr = self.linkrev()
        if srcrev == lkr:
            return lkr
        # hack to reuse ancestor computation when searching for renames
        memberanc = getattr(self, '_ancestrycontext', None)
        iteranc = None
        if srcrev is None:
            # wctx case, used by workingfilectx during mergecopy
            revs = [p.rev() for p in self._repo[None].parents()]
            inclusive = True  # we skipped the real (revless) source
        else:
            revs = [srcrev]
        if memberanc is None:
            memberanc = iteranc = cl.ancestors(revs, lkr, inclusive=inclusive)
        # check if this linkrev is an ancestor of srcrev
        if lkr not in memberanc:
            if iteranc is None:
                iteranc = cl.ancestors(revs, lkr, inclusive=inclusive)
            fnode = self._filenode
            path = self._path
            for a in iteranc:
                if stoprev is not None and a < stoprev:
                    return None
                ac = cl.read(a)  # get changeset data (we avoid object creation)
                if path in ac[3]:  # checking the 'files' field.
                    # The file has been touched, check if the content is
                    # similar to the one we search for.
                    if fnode == mfl[ac[0]].readfast().get(path):
                        return a
            # In theory, we should never get out of that loop without a result.
            # But if manifest uses a buggy file revision (not children of the
            # one it replaces) we could. Such a buggy situation will likely
            # result in a crash somewhere else at some point.
        return lkr

    def isintroducedafter(self, changelogrev):
        """True if a filectx has been introduced after a given floor revision
        """
        if self.linkrev() >= changelogrev:
            return True
        introrev = self._introrev(stoprev=changelogrev)
        if introrev is None:
            return False
        return introrev >= changelogrev

    def introrev(self):
        """return the rev of the changeset which introduced this file revision

        This method is different from linkrev because it takes into account the
        changeset the filectx was created from. It ensures the returned
        revision is one of its ancestors. This prevents bugs from
        'linkrev-shadowing' when a file revision is used by multiple
        changesets.
        """
        return self._introrev()

    def _introrev(self, stoprev=None):
        """
        Same as `introrev`, but with an extra argument to limit the changelog
        iteration range for some internal use cases.

        If `stoprev` is set, the `introrev` will not be searched past that
        `stoprev` revision and "None" might be returned. This is useful to
        limit the iteration range.
        """
        toprev = None
        attrs = vars(self)
        if '_changeid' in attrs:
            # We have a cached value already
            toprev = self._changeid
        elif '_changectx' in attrs:
            # We know which changelog entry we are coming from
            toprev = self._changectx.rev()

        if toprev is not None:
            return self._adjustlinkrev(toprev, inclusive=True, stoprev=stoprev)
        elif '_descendantrev' in attrs:
            introrev = self._adjustlinkrev(self._descendantrev, stoprev=stoprev)
            # be nice and cache the result of the computation
            if introrev is not None:
                self._changeid = introrev
            return introrev
        else:
            return self.linkrev()

    def introfilectx(self):
        """Return filectx having identical contents, but pointing to the
        changeset revision where this filectx was introduced"""
        introrev = self.introrev()
        if self.rev() == introrev:
            return self
        return self.filectx(self.filenode(), changeid=introrev)

    def _parentfilectx(self, path, fileid, filelog):
        """create parent filectx keeping ancestry info for _adjustlinkrev()"""
        fctx = filectx(self._repo, path, fileid=fileid, filelog=filelog)
        if '_changeid' in vars(self) or '_changectx' in vars(self):
            # If self is associated with a changeset (probably explicitly
            # fed), ensure the created filectx is associated with a
            # changeset that is an ancestor of self.changectx.
            # This lets us later use _adjustlinkrev to get a correct link.
            fctx._descendantrev = self.rev()
            fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
        elif '_descendantrev' in vars(self):
            # Otherwise propagate _descendantrev if we have one associated.
            fctx._descendantrev = self._descendantrev
            fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
        return fctx

    def parents(self):
        _path = self._path
        fl = self._filelog
        parents = self._filelog.parents(self._filenode)
        pl = [(_path, node, fl) for node in parents if node != nullid]

        r = fl.renamed(self._filenode)
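        # ``r`` is the rename source recorded in the filelog metadata --
        # presumably a (source path, source filenode) pair, or a false
        # value when this revision is not a rename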
        if r:
            # - In the simple rename case, both parents are nullid and pl is
            #   empty.
            # - In case of merge, only one of the parents is nullid and should
            #   be replaced with the rename information. This parent is
            #   -always- the first one.
            #
            # As nullid parents have always been filtered out in the previous
            # list comprehension, inserting at 0 will always result in
            # "replacing the first nullid parent with rename information".
            pl.insert(0, (r[0], r[1], self._repo.file(r[0])))

        return [self._parentfilectx(path, fnode, l) for path, fnode, l in pl]

    def p1(self):
        return self.parents()[0]

    def p2(self):
        p = self.parents()
        if len(p) == 2:
            return p[1]
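        # only one parent: fall back to the filelog's null revision
        # (fileid=-1), i.e. an empty placeholder filectx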
        return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)

    def annotate(self, follow=False, skiprevs=None, diffopts=None):
        """Returns a list of annotateline objects for each line in the file

        - line.fctx is the filectx of the node where that line was last changed
        - line.lineno is the line number at the first appearance in the managed
          file
        - line.text is the data on that line (including newline character)
        """
        getlog = util.lrucachefunc(lambda x: self._repo.file(x))

        def parents(f):
            # Cut _descendantrev here to mitigate the penalty of lazy linkrev
            # adjustment. Otherwise, p._adjustlinkrev() would walk changelog
            # from the topmost introrev (= srcrev) down to p.linkrev() if it
            # isn't an ancestor of the srcrev.
            f._changeid
            pl = f.parents()

            # Don't return renamed parents if we aren't following.
            if not follow:
                pl = [p for p in pl if p.path() == f.path()]

            # renamed filectx won't have a filelog yet, so set it
            # from the cache to save time
            for p in pl:
                if not '_filelog' in p.__dict__:
                    p._filelog = getlog(p.path())

            return pl

        # use linkrev to find the first changeset where self appeared
        base = self.introfilectx()
        if getattr(base, '_ancestrycontext', None) is None:
            cl = self._repo.changelog
            if base.rev() is None:
                # wctx is not inclusive, but works because _ancestrycontext
                # is used to test filelog revisions
                ac = cl.ancestors(
                    [p.rev() for p in base.parents()], inclusive=True
                )
            else:
                ac = cl.ancestors([base.rev()], inclusive=True)
            base._ancestrycontext = ac

        return dagop.annotate(
            base, parents, skiprevs=skiprevs, diffopts=diffopts
        )

    def ancestors(self, followfirst=False):
        visit = {}
        c = self
        if followfirst:
            cut = 1
        else:
            cut = None

        while True:
            for parent in c.parents()[:cut]:
                visit[(parent.linkrev(), parent.filenode())] = parent
            if not visit:
                break
            c = visit.pop(max(visit))
            yield c

    def decodeddata(self):
        """Returns `data()` after running repository decoding filters.

        This is often equivalent to how the data would be expressed on disk.
        """
        return self._repo.wwritedata(self.path(), self.data())


class filectx(basefilectx):
    """A filecontext object makes access to data related to a particular
    filerevision convenient."""

    def __init__(
        self,
        repo,
        path,
        changeid=None,
        fileid=None,
        filelog=None,
        changectx=None,
    ):
        """changeid must be a revision number, if specified.
        fileid can be a file revision or node."""
        self._repo = repo
        self._path = path

        assert (
            changeid is not None or fileid is not None or changectx is not None
        ), (
            b"bad args: changeid=%r, fileid=%r, changectx=%r"
            % (changeid, fileid, changectx,)
        )

        if filelog is not None:
            self._filelog = filelog

        if changeid is not None:
            self._changeid = changeid
        if changectx is not None:
            self._changectx = changectx
        if fileid is not None:
            self._fileid = fileid

    @propertycache
    def _changectx(self):
        try:
            return self._repo[self._changeid]
        except error.FilteredRepoLookupError:
            # Linkrev may point to any revision in the repository. When the
            # repository is filtered this may lead to `filectx` trying to
            # build `changectx` for a filtered revision. In such a case we
            # fall back to creating `changectx` on the unfiltered version of
            # the repository. This fallback should not be an issue because
            # `changectx` from `filectx` are not used in complex operations
            # that care about filtering.
            #
            # This fallback is a cheap and dirty fix that prevents several
            # crashes. It does not ensure the behavior is correct. However
            # the behavior was not correct before filtering either, and
            # "incorrect behavior" is seen as better than "crash".
            #
            # Linkrevs have several serious problems with filtering that are
            # complicated to solve. Proper handling of the issue here should
            # be considered when solving the linkrev issues is on the table.
            return self._repo.unfiltered()[self._changeid]

    def filectx(self, fileid, changeid=None):
        '''opens an arbitrary revision of the file without
        opening a new filelog'''
        return filectx(
            self._repo,
            self._path,
            fileid=fileid,
            filelog=self._filelog,
            changeid=changeid,
        )

    def rawdata(self):
        return self._filelog.rawdata(self._filenode)

    def rawflags(self):
        """low-level revlog flags"""
        return self._filelog.flags(self._filerev)

    def data(self):
        try:
            return self._filelog.read(self._filenode)
        except error.CensoredNodeError:
            if self._repo.ui.config(b"censor", b"policy") == b"ignore":
                return b""
            raise error.Abort(
                _(b"censored node: %s") % short(self._filenode),
                hint=_(b"set censor.policy to ignore errors"),
            )

    def size(self):
        return self._filelog.size(self._filerev)

    @propertycache
    def _copied(self):
        """check if file was actually renamed in this changeset revision

        If a rename is logged in this file revision, we report the copy for
        the changeset only if the file revision's linkrev points back to the
        changeset in question or both changeset parents contain different
        file revisions.
        """

        renamed = self._filelog.renamed(self._filenode)
        if not renamed:
            return None

        if self.rev() == self.linkrev():
            return renamed

        name = self.path()
        fnode = self._filenode
        for p in self._changectx.parents():
            try:
                if fnode == p.filenode(name):
                    return None
            except error.LookupError:
                pass
        return renamed

    def children(self):
        # hard for renames
        c = self._filelog.children(self._filenode)
        return [
            filectx(self._repo, self._path, fileid=x, filelog=self._filelog)
            for x in c
        ]


class committablectx(basectx):
    """A committablectx object provides common functionality for a context that
    wants the ability to commit, e.g. workingctx or memctx."""

    def __init__(
        self,
        repo,
        text=b"",
        user=None,
        date=None,
        extra=None,
        changes=None,
        branch=None,
    ):
        super(committablectx, self).__init__(repo)
        self._rev = None
        self._node = None
        self._text = text
        if date:
            self._date = dateutil.parsedate(date)
        if user:
            self._user = user
        if changes:
            self._status = changes

        self._extra = {}
        if extra:
            self._extra = extra.copy()
        if branch is not None:
            self._extra[b'branch'] = encoding.fromlocal(branch)
        if not self._extra.get(b'branch'):
            self._extra[b'branch'] = b'default'

    def __bytes__(self):
        return bytes(self._parents[0]) + b"+"

    __str__ = encoding.strmethod(__bytes__)

    def __nonzero__(self):
        return True

    __bool__ = __nonzero__

    @propertycache
    def _status(self):
        return self._repo.status()

    @propertycache
    def _user(self):
        return self._repo.ui.username()

    @propertycache
    def _date(self):
        ui = self._repo.ui
        date = ui.configdate(b'devel', b'default-date')
        if date is None:
            date = dateutil.makedate()
        return date

    def subrev(self, subpath):
        return None

    def manifestnode(self):
        return None

    def user(self):
        return self._user or self._repo.ui.username()

    def date(self):
        return self._date

    def description(self):
        return self._text

    def files(self):
        return sorted(
            self._status.modified + self._status.added + self._status.removed
        )

    def modified(self):
        return self._status.modified

    def added(self):
        return self._status.added

    def removed(self):
        return self._status.removed

    def deleted(self):
        return self._status.deleted

    filesmodified = modified
    filesadded = added
    filesremoved = removed

    def branch(self):
        return encoding.tolocal(self._extra[b'branch'])

    def closesbranch(self):
        return b'close' in self._extra

    def extra(self):
        return self._extra

    def isinmemory(self):
        return False

    def tags(self):
        return []

    def bookmarks(self):
        b = []
        for p in self.parents():
            b.extend(p.bookmarks())
        return b

    def phase(self):
        phase = phases.newcommitphase(self._repo.ui)
        for p in self.parents():
            phase = max(phase, p.phase())
        return phase

    def hidden(self):
        return False

    def children(self):
        return []

    def ancestor(self, c2):
        """return the "best" ancestor context of self and c2"""
        return self._parents[0].ancestor(c2)  # punt on two parents for now

    def ancestors(self):
        for p in self._parents:
            yield p
        for a in self._repo.changelog.ancestors(
            [p.rev() for p in self._parents]
        ):
            yield self._repo[a]

    def markcommitted(self, node):
        """Perform post-commit cleanup necessary after committing this ctx

        Specifically, this updates the backing stores this working context
        wraps to reflect the fact that the changes represented by this
        workingctx have been committed. For example, it marks
        modified and added files as normal in the dirstate.

        """

    def dirty(self, missing=False, merge=True, branch=True):
        return False


class workingctx(committablectx):
    """A workingctx object makes access to data related to
    the current working directory convenient.
    date - any valid date string or (unixtime, offset), or None.
    user - username string, or None.
    extra - a dictionary of extra values, or None.
    changes - a list of file lists as returned by localrepo.status()
              or None to use the repository status.
    """

    def __init__(
        self, repo, text=b"", user=None, date=None, extra=None, changes=None
    ):
        branch = None
        if not extra or b'branch' not in extra:
            try:
                branch = repo.dirstate.branch()
            except UnicodeDecodeError:
                raise error.Abort(_(b'branch name not in UTF-8!'))
        super(workingctx, self).__init__(
            repo, text, user, date, extra, changes, branch=branch
        )

    def __iter__(self):
        d = self._repo.dirstate
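        # dirstate state characters (standard dirstate conventions):
        # b'n' normal, b'a' added, b'r' removed, b'm' merged,
        # b'?' untracked; removed files are skipped here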
        for f in d:
            if d[f] != b'r':
                yield f

    def __contains__(self, key):
        return self._repo.dirstate[key] not in b"?r"

    def hex(self):
        return wdirhex

    @propertycache
    def _parents(self):
        p = self._repo.dirstate.parents()
        if p[1] == nullid:
            p = p[:-1]
        # use unfiltered repo to delay/avoid loading obsmarkers
        unfi = self._repo.unfiltered()
        return [
            changectx(
                self._repo, unfi.changelog.rev(n), n, maybe_filtered=False
            )
            for n in p
        ]

    def _fileinfo(self, path):
        # populate __dict__['_manifest'] as workingctx has no _manifestdelta
        self._manifest
        return super(workingctx, self)._fileinfo(path)

    def _buildflagfunc(self):
        # Create a fallback function for getting file flags when the
        # filesystem doesn't support them

        copiesget = self._repo.dirstate.copies().get
        parents = self.parents()
        if len(parents) < 2:
            # when we have one parent, it's easy: copy from parent
            man = parents[0].manifest()

            def func(f):
                f = copiesget(f, f)
                return man.flags(f)

        else:
            # merges are tricky: we try to reconstruct the unstored
            # result from the merge (issue1802)
            p1, p2 = parents
            pa = p1.ancestor(p2)
            m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()

            def func(f):
                f = copiesget(f, f)  # may be wrong for merges with copies
                fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f)
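                # three-way flag merge: if one side left the flag unchanged
                # relative to the ancestor, the other side's change wins;
                # identical flags on both sides trivially win as well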
                if fl1 == fl2:
                    return fl1
                if fl1 == fla:
                    return fl2
                if fl2 == fla:
                    return fl1
                return b''  # punt for conflicts

        return func

    @propertycache
    def _flagfunc(self):
        return self._repo.dirstate.flagfunc(self._buildflagfunc)

    def flags(self, path):
        if '_manifest' in self.__dict__:
            try:
                return self._manifest.flags(path)
            except KeyError:
                return b''

        try:
            return self._flagfunc(path)
        except OSError:
            return b''

    def filectx(self, path, filelog=None):
        """get a file context from the working directory"""
        return workingfilectx(
            self._repo, path, workingctx=self, filelog=filelog
        )

    def dirty(self, missing=False, merge=True, branch=True):
        """check whether a working directory is modified"""
        # check subrepos first
        for s in sorted(self.substate):
            if self.sub(s).dirty(missing=missing):
                return True
        # check current working dir
        return (
            (merge and self.p2())
            or (branch and self.branch() != self.p1().branch())
            or self.modified()
            or self.added()
            or self.removed()
            or (missing and self.deleted())
        )

    def add(self, list, prefix=b""):
        with self._repo.wlock():
            ui, ds = self._repo.ui, self._repo.dirstate
            uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
            rejected = []
            lstat = self._repo.wvfs.lstat
            for f in list:
                # ds.pathto() returns an absolute path when this is invoked
                # from the keyword extension. That gets flagged as
                # non-portable on Windows, since it contains the drive
                # letter and colon.
                scmutil.checkportable(ui, os.path.join(prefix, f))
                try:
                    st = lstat(f)
                except OSError:
                    ui.warn(_(b"%s does not exist!\n") % uipath(f))
                    rejected.append(f)
                    continue
                limit = ui.configbytes(b'ui', b'large-file-limit')
                if limit != 0 and st.st_size > limit:
                    ui.warn(
                        _(
                            b"%s: up to %d MB of RAM may be required "
                            b"to manage this file\n"
                            b"(use 'hg revert %s' to cancel the "
                            b"pending addition)\n"
                        )
                        % (f, 3 * st.st_size // 1000000, uipath(f))
                    )
                if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
                    ui.warn(
                        _(
                            b"%s not added: only files and symlinks "
                            b"supported currently\n"
                        )
                        % uipath(f)
                    )
                    rejected.append(f)
                elif ds[f] in b'amn':
                    ui.warn(_(b"%s already tracked!\n") % uipath(f))
                elif ds[f] == b'r':
                    ds.normallookup(f)
                else:
                    ds.add(f)
            return rejected

    def forget(self, files, prefix=b""):
        with self._repo.wlock():
            ds = self._repo.dirstate
            uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
            rejected = []
            for f in files:
                if f not in ds:
                    self._repo.ui.warn(_(b"%s not tracked!\n") % uipath(f))
                    rejected.append(f)
                elif ds[f] != b'a':
                    ds.remove(f)
                else:
                    ds.drop(f)
            return rejected

    def copy(self, source, dest):
        try:
            st = self._repo.wvfs.lstat(dest)
        except OSError as err:
            if err.errno != errno.ENOENT:
                raise
            self._repo.ui.warn(
                _(b"%s does not exist!\n") % self._repo.dirstate.pathto(dest)
            )
            return
        if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
            self._repo.ui.warn(
                _(b"copy failed: %s is not a file or a symbolic link\n")
                % self._repo.dirstate.pathto(dest)
            )
        else:
            with self._repo.wlock():
                ds = self._repo.dirstate
                if ds[dest] in b'?':
                    ds.add(dest)
                elif ds[dest] in b'r':
                    ds.normallookup(dest)
                ds.copy(source, dest)

    def match(
        self,
        pats=None,
        include=None,
        exclude=None,
        default=b'glob',
        listsubrepos=False,
        badfn=None,
        cwd=None,
    ):
        r = self._repo
        if not cwd:
            cwd = r.getcwd()
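        # ``cwd`` lets callers resolve relative patterns and filesets
        # against a directory other than the process's working directory.
        # A hypothetical caller:
        #
        #     m = ctx.match([b'relglob:*.py'], cwd=b'some/subdir')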

        # Only a case insensitive filesystem needs magic to translate user input
        # to actual case in the filesystem.
        icasefs = not util.fscasesensitive(r.root)
        return matchmod.match(
            r.root,
            cwd,
            pats,
            include,
            exclude,
            default,
            auditor=r.auditor,
            ctx=self,
            listsubrepos=listsubrepos,
            badfn=badfn,
            icasefs=icasefs,
        )

    def _filtersuspectsymlink(self, files):
        if not files or self._repo.dirstate._checklink:
            return files

        # Symlink placeholders may get non-symlink-like contents
        # via user error or dereferencing by NFS or Samba servers,
        # so we filter out any placeholders that don't look like a
        # symlink
        sane = []
        for f in files:
            if self.flags(f) == b'l':
                d = self[f].data()
                if (
                    d == b''
                    or len(d) >= 1024
                    or b'\n' in d
                    or stringutil.binary(d)
                ):
                    self._repo.ui.debug(
                        b'ignoring suspect symlink placeholder "%s"\n' % f
                    )
                    continue
            sane.append(f)
        return sane

    def _checklookup(self, files):
        # check for any possibly clean files
        if not files:
            return [], [], []

        modified = []
        deleted = []
        fixup = []
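        # ``fixup`` collects files whose stat information looked suspicious
        # but whose content compares equal to the parent;
        # _poststatusfixup() later marks those clean in the dirstate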
        pctx = self._parents[0]
        # do a full compare of any files that might have changed
        for f in sorted(files):
            try:
                # This will return True for a file that got replaced by a
                # directory in the interim, but fixing that is pretty hard.
                if (
                    f not in pctx
                    or self.flags(f) != pctx.flags(f)
                    or pctx[f].cmp(self[f])
                ):
                    modified.append(f)
                else:
                    fixup.append(f)
            except (IOError, OSError):
                # Did the file become inaccessible in between? Mark it as
                # deleted, matching dirstate behavior (issue5584).
                # The dirstate has more complex behavior around whether a
                # missing file matches a directory, etc, but we don't need to
                # bother with that: if f has made it to this point, we're sure
                # it's in the dirstate.
                deleted.append(f)

        return modified, deleted, fixup

    def _poststatusfixup(self, status, fixup):
        """update dirstate for files that are actually clean"""
        poststatus = self._repo.postdsstatus()
        if fixup or poststatus:
            try:
                oldid = self._repo.dirstate.identity()
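                # identity() acts as an optimistic-locking token: if
                # .hg/dirstate is rewritten between this read and the
                # re-check under wlock below, the cached results are
                # treated as stale and not written out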

                # updating the dirstate is optional
                # so we don't wait on the lock
                # wlock can invalidate the dirstate, so cache normal _after_
                # taking the lock
                with self._repo.wlock(False):
                    if self._repo.dirstate.identity() == oldid:
                        if fixup:
                            normal = self._repo.dirstate.normal
                            for f in fixup:
                                normal(f)
                            # write changes out explicitly, because nesting
                            # wlock at runtime may prevent 'wlock.release()'
                            # after this block from doing so for subsequent
                            # changing files
                            tr = self._repo.currenttransaction()
                            self._repo.dirstate.write(tr)

                        if poststatus:
                            for ps in poststatus:
                                ps(self, status)
                    else:
                        # in this case, writing changes out breaks
                        # consistency, because .hg/dirstate was
                        # already changed simultaneously after last
                        # caching (see also issue5584 for detail)
                        self._repo.ui.debug(
                            b'skip updating dirstate: identity mismatch\n'
                        )
            except error.LockError:
                pass
            finally:
                # Even if the wlock couldn't be grabbed, clear out the list.
                self._repo.clearpostdsstatus()

1816 def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
1822 def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
1817 '''Gets the status from the dirstate -- internal use only.'''
1823 '''Gets the status from the dirstate -- internal use only.'''
1818 subrepos = []
1824 subrepos = []
1819 if b'.hgsub' in self:
1825 if b'.hgsub' in self:
1820 subrepos = sorted(self.substate)
1826 subrepos = sorted(self.substate)
1821 cmp, s = self._repo.dirstate.status(
1827 cmp, s = self._repo.dirstate.status(
1822 match, subrepos, ignored=ignored, clean=clean, unknown=unknown
1828 match, subrepos, ignored=ignored, clean=clean, unknown=unknown
1823 )
1829 )
1824
1830
1825 # check for any possibly clean files
1831 # check for any possibly clean files
1826 fixup = []
1832 fixup = []
1827 if cmp:
1833 if cmp:
1828 modified2, deleted2, fixup = self._checklookup(cmp)
1834 modified2, deleted2, fixup = self._checklookup(cmp)
1829 s.modified.extend(modified2)
1835 s.modified.extend(modified2)
1830 s.deleted.extend(deleted2)
1836 s.deleted.extend(deleted2)
1831
1837
1832 if fixup and clean:
1838 if fixup and clean:
1833 s.clean.extend(fixup)
1839 s.clean.extend(fixup)
1834
1840
1835 self._poststatusfixup(s, fixup)
1841 self._poststatusfixup(s, fixup)
1836
1842
1837 if match.always():
1843 if match.always():
1838 # cache for performance
1844 # cache for performance
1839 if s.unknown or s.ignored or s.clean:
1845 if s.unknown or s.ignored or s.clean:
1840 # "_status" is cached with list*=False in the normal route
1846 # "_status" is cached with list*=False in the normal route
1841 self._status = scmutil.status(
1847 self._status = scmutil.status(
1842 s.modified, s.added, s.removed, s.deleted, [], [], []
1848 s.modified, s.added, s.removed, s.deleted, [], [], []
1843 )
1849 )
1844 else:
1850 else:
1845 self._status = s
1851 self._status = s
1846
1852
1847 return s
1853 return s
1848
1854
1849 @propertycache
1855 @propertycache
1850 def _copies(self):
1856 def _copies(self):
1851 p1copies = {}
1857 p1copies = {}
1852 p2copies = {}
1858 p2copies = {}
1853 parents = self._repo.dirstate.parents()
1859 parents = self._repo.dirstate.parents()
1854 p1manifest = self._repo[parents[0]].manifest()
1860 p1manifest = self._repo[parents[0]].manifest()
1855 p2manifest = self._repo[parents[1]].manifest()
1861 p2manifest = self._repo[parents[1]].manifest()
1856 changedset = set(self.added()) | set(self.modified())
1862 changedset = set(self.added()) | set(self.modified())
1857 narrowmatch = self._repo.narrowmatch()
1863 narrowmatch = self._repo.narrowmatch()
1858 for dst, src in self._repo.dirstate.copies().items():
1864 for dst, src in self._repo.dirstate.copies().items():
1859 if dst not in changedset or not narrowmatch(dst):
1865 if dst not in changedset or not narrowmatch(dst):
1860 continue
1866 continue
1861 if src in p1manifest:
1867 if src in p1manifest:
1862 p1copies[dst] = src
1868 p1copies[dst] = src
1863 elif src in p2manifest:
1869 elif src in p2manifest:
1864 p2copies[dst] = src
1870 p2copies[dst] = src
1865 return p1copies, p2copies
1871 return p1copies, p2copies
1866
1872
1867 @propertycache
1873 @propertycache
1868 def _manifest(self):
1874 def _manifest(self):
1869 """generate a manifest corresponding to the values in self._status
1875 """generate a manifest corresponding to the values in self._status
1870
1876
1871 This reuse the file nodeid from parent, but we use special node
1877 This reuse the file nodeid from parent, but we use special node
1872 identifiers for added and modified files. This is used by manifests
1878 identifiers for added and modified files. This is used by manifests
1873 merge to see that files are different and by update logic to avoid
1879 merge to see that files are different and by update logic to avoid
1874 deleting newly added files.
1880 deleting newly added files.
1875 """
1881 """
        return self._buildstatusmanifest(self._status)

    def _buildstatusmanifest(self, status):
        """Builds a manifest that includes the given status results."""
        parents = self.parents()

        man = parents[0].manifest().copy()

        ff = self._flagfunc
        for i, l in (
            (addednodeid, status.added),
            (modifiednodeid, status.modified),
        ):
            for f in l:
                man[f] = i
                try:
                    man.setflag(f, ff(f))
                except OSError:
                    pass

        for f in status.deleted + status.removed:
            if f in man:
                del man[f]

        return man

    def _buildstatus(
        self, other, s, match, listignored, listclean, listunknown
    ):
        """build a status with respect to another context

        This includes logic for maintaining the fast path of status when
        comparing the working directory against its parent, which is to skip
        building a new manifest if self (working directory) is not comparing
        against its parent (repo['.']).
        """
        s = self._dirstatestatus(match, listignored, listclean, listunknown)
        # Filter out symlinks that, in the case of FAT32 and NTFS filesystems,
        # might have accidentally ended up with the entire contents of the file
        # they are supposed to be linking to.
        s.modified[:] = self._filtersuspectsymlink(s.modified)
        if other != self._repo[b'.']:
            s = super(workingctx, self)._buildstatus(
                other, s, match, listignored, listclean, listunknown
            )
        return s

    def _matchstatus(self, other, match):
        """override the match method with a filter for directory patterns

        We use inheritance to customize the match.bad method only in cases of
        workingctx since it belongs only to the working directory when
        comparing against the parent changeset.

        If we aren't comparing against the working directory's parent, then we
        just use the default match object sent to us.
        """
        if other != self._repo[b'.']:

            def bad(f, msg):
                # 'f' may be a directory pattern from 'match.files()',
                # so 'f not in ctx1' is not enough
                if f not in other and not other.hasdir(f):
                    self._repo.ui.warn(
                        b'%s: %s\n' % (self._repo.dirstate.pathto(f), msg)
                    )

            match.bad = bad
        return match

    def walk(self, match):
        '''Generates matching file names.'''
        return sorted(
            self._repo.dirstate.walk(
                self._repo.narrowmatch(match),
                subrepos=sorted(self.substate),
                unknown=True,
                ignored=False,
            )
        )

    def matches(self, match):
        match = self._repo.narrowmatch(match)
        ds = self._repo.dirstate
        return sorted(f for f in ds.matches(match) if ds[f] != b'r')

    def markcommitted(self, node):
        with self._repo.dirstate.parentchange():
            for f in self.modified() + self.added():
                self._repo.dirstate.normal(f)
            for f in self.removed():
                self._repo.dirstate.drop(f)
            self._repo.dirstate.setparents(node)

        # write changes out explicitly, because nesting wlock at
        # runtime may prevent 'wlock.release()' in 'repo.commit()'
        # from immediately doing so for subsequent changing files
        self._repo.dirstate.write(self._repo.currenttransaction())

        sparse.aftercommit(self._repo, node)


class committablefilectx(basefilectx):
    """A committablefilectx provides common functionality for a file context
    that wants the ability to commit, e.g. workingfilectx or memfilectx."""

    def __init__(self, repo, path, filelog=None, ctx=None):
        self._repo = repo
        self._path = path
        self._changeid = None
        self._filerev = self._filenode = None

        if filelog is not None:
            self._filelog = filelog
        if ctx:
            self._changectx = ctx

    def __nonzero__(self):
        return True

    __bool__ = __nonzero__

    def linkrev(self):
        # linked to self._changectx no matter if file is modified or not
        return self.rev()

    def renamed(self):
        path = self.copysource()
        if not path:
            return None
        return path, self._changectx._parents[0]._manifest.get(path, nullid)

    def parents(self):
        '''return parent filectxs, following copies if necessary'''

        def filenode(ctx, path):
            return ctx._manifest.get(path, nullid)

        path = self._path
        fl = self._filelog
        pcl = self._changectx._parents
        renamed = self.renamed()

        if renamed:
            pl = [renamed + (None,)]
        else:
            pl = [(path, filenode(pcl[0], path), fl)]

        for pc in pcl[1:]:
            pl.append((path, filenode(pc, path), fl))

        return [
            self._parentfilectx(p, fileid=n, filelog=l)
            for p, n, l in pl
            if n != nullid
        ]

    def children(self):
        return []


class workingfilectx(committablefilectx):
    """A workingfilectx object makes access to data related to a particular
    file in the working directory convenient."""

    def __init__(self, repo, path, filelog=None, workingctx=None):
        super(workingfilectx, self).__init__(repo, path, filelog, workingctx)

    @propertycache
    def _changectx(self):
        return workingctx(self._repo)

    def data(self):
        return self._repo.wread(self._path)

    def copysource(self):
        return self._repo.dirstate.copied(self._path)

    def size(self):
        return self._repo.wvfs.lstat(self._path).st_size

    def lstat(self):
        return self._repo.wvfs.lstat(self._path)

    def date(self):
        t, tz = self._changectx.date()
        try:
            return (self._repo.wvfs.lstat(self._path)[stat.ST_MTIME], tz)
        except OSError as err:
            if err.errno != errno.ENOENT:
                raise
            return (t, tz)

    def exists(self):
        return self._repo.wvfs.exists(self._path)

    def lexists(self):
        return self._repo.wvfs.lexists(self._path)

    def audit(self):
        return self._repo.wvfs.audit(self._path)

    def cmp(self, fctx):
        """compare with other file context

        returns True if different than fctx.
        """
        # fctx should be a filectx (not a workingfilectx)
        # invert comparison to reuse the same code path
        return fctx.cmp(self)

    def remove(self, ignoremissing=False):
        """wraps unlink for a repo's working directory"""
        rmdir = self._repo.ui.configbool(b'experimental', b'removeemptydirs')
        self._repo.wvfs.unlinkpath(
            self._path, ignoremissing=ignoremissing, rmdir=rmdir
        )

    def write(self, data, flags, backgroundclose=False, **kwargs):
        """wraps repo.wwrite"""
        return self._repo.wwrite(
            self._path, data, flags, backgroundclose=backgroundclose, **kwargs
        )

    def markcopied(self, src):
        """marks this file a copy of `src`"""
        self._repo.dirstate.copy(src, self._path)

    def clearunknown(self):
        """Removes conflicting items in the working directory so that
        ``write()`` can be called successfully.
        """
        wvfs = self._repo.wvfs
        f = self._path
        wvfs.audit(f)
        if self._repo.ui.configbool(
            b'experimental', b'merge.checkpathconflicts'
        ):
            # remove files under the directory as they should already be
            # warned and backed up
            if wvfs.isdir(f) and not wvfs.islink(f):
                wvfs.rmtree(f, forcibly=True)
            for p in reversed(list(pathutil.finddirs(f))):
                if wvfs.isfileorlink(p):
                    wvfs.unlink(p)
                    break
        else:
            # don't remove files if path conflicts are not processed
            if wvfs.isdir(f) and not wvfs.islink(f):
                wvfs.removedirs(f)

    def setflags(self, l, x):
        self._repo.wvfs.setflags(self._path, l, x)


class overlayworkingctx(committablectx):
    """Wraps another mutable context with a write-back cache that can be
    converted into a commit context.

    self._cache[path] maps to a dict with keys: {
      'exists': bool?
      'date': date?
      'data': str?
      'flags': str?
      'copied': str? (path or None)
    }
    If `exists` is True, `flags` must be non-None and `date` is non-None. If it
    is `False`, the file was deleted.
    """

    def __init__(self, repo):
        super(overlayworkingctx, self).__init__(repo)
        self.clean()

    def setbase(self, wrappedctx):
        self._wrappedctx = wrappedctx
        self._parents = [wrappedctx]
        # Drop old manifest cache as it is now out of date.
        # This is necessary when, e.g., rebasing several nodes with one
        # ``overlayworkingctx`` (e.g. with --collapse).
        util.clearcachedproperty(self, b'_manifest')

    def data(self, path):
        if self.isdirty(path):
            if self._cache[path][b'exists']:
                if self._cache[path][b'data'] is not None:
                    return self._cache[path][b'data']
                else:
                    # Must fallback here, too, because we only set flags.
                    return self._wrappedctx[path].data()
            else:
                raise error.ProgrammingError(
                    b"No such file or directory: %s" % path
                )
        else:
            return self._wrappedctx[path].data()

    @propertycache
    def _manifest(self):
        parents = self.parents()
        man = parents[0].manifest().copy()

        flag = self._flagfunc
        for path in self.added():
            man[path] = addednodeid
            man.setflag(path, flag(path))
        for path in self.modified():
            man[path] = modifiednodeid
            man.setflag(path, flag(path))
        for path in self.removed():
            del man[path]
        return man

    @propertycache
    def _flagfunc(self):
        def f(path):
            return self._cache[path][b'flags']

        return f

    def files(self):
        return sorted(self.added() + self.modified() + self.removed())

    def modified(self):
        return [
            f
            for f in self._cache.keys()
            if self._cache[f][b'exists'] and self._existsinparent(f)
        ]

    def added(self):
        return [
            f
            for f in self._cache.keys()
            if self._cache[f][b'exists'] and not self._existsinparent(f)
        ]

    def removed(self):
        return [
            f
            for f in self._cache.keys()
            if not self._cache[f][b'exists'] and self._existsinparent(f)
        ]

    def p1copies(self):
        copies = self._repo._wrappedctx.p1copies().copy()
        narrowmatch = self._repo.narrowmatch()
        for f in self._cache.keys():
            if not narrowmatch(f):
                continue
            copies.pop(f, None)  # delete if it exists
            source = self._cache[f][b'copied']
            if source:
                copies[f] = source
        return copies

    def p2copies(self):
        copies = self._repo._wrappedctx.p2copies().copy()
        narrowmatch = self._repo.narrowmatch()
        for f in self._cache.keys():
            if not narrowmatch(f):
                continue
            copies.pop(f, None)  # delete if it exists
            source = self._cache[f][b'copied']
            if source:
                copies[f] = source
        return copies

    def isinmemory(self):
        return True

    def filedate(self, path):
        if self.isdirty(path):
            return self._cache[path][b'date']
        else:
            return self._wrappedctx[path].date()

    def markcopied(self, path, origin):
        self._markdirty(
            path,
            exists=True,
            date=self.filedate(path),
            flags=self.flags(path),
            copied=origin,
        )

    def copydata(self, path):
        if self.isdirty(path):
            return self._cache[path][b'copied']
        else:
            return None

    def flags(self, path):
        if self.isdirty(path):
            if self._cache[path][b'exists']:
                return self._cache[path][b'flags']
            else:
                raise error.ProgrammingError(
                    b"No such file or directory: %s" % self._path
                )
        else:
            return self._wrappedctx[path].flags()

    def __contains__(self, key):
        if key in self._cache:
            return self._cache[key][b'exists']
        return key in self.p1()

    def _existsinparent(self, path):
        try:
            # ``commitctx`` raises a ``ManifestLookupError`` if a path does not
            # exist, unlike ``workingctx``, which returns a ``workingfilectx``
            # with an ``exists()`` function.
            self._wrappedctx[path]
            return True
        except error.ManifestLookupError:
            return False

    def _auditconflicts(self, path):
        """Replicates conflict checks done by wvfs.write().

        Since we never write to the filesystem and never call `applyupdates` in
        IMM, we'll never check that a path is actually writable -- e.g., because
        it adds `a/foo`, but `a` is actually a file in the other commit.
        """

        def fail(path, component):
            # p1() is the base and we're receiving "writes" for p2()'s
            # files.
            if b'l' in self.p1()[component].flags():
                raise error.Abort(
                    b"error: %s conflicts with symlink %s "
                    b"in %d." % (path, component, self.p1().rev())
                )
            else:
                raise error.Abort(
                    b"error: '%s' conflicts with file '%s' in "
                    b"%d." % (path, component, self.p1().rev())
                )

        # Test that each new directory to be created to write this path from p2
        # is not a file in p1.
        components = path.split(b'/')
        for i in pycompat.xrange(len(components)):
            component = b"/".join(components[0:i])
            if component in self:
                fail(path, component)

        # Test the other direction -- that this path from p2 isn't a directory
        # in p1 (test that p1 doesn't have any paths matching `path/*`).
        match = self.match([path], default=b'path')
        matches = self.p1().manifest().matches(match)
        mfiles = matches.keys()
        if len(mfiles) > 0:
            if len(mfiles) == 1 and mfiles[0] == path:
                return
            # omit the files which are deleted in current IMM wctx
            mfiles = [m for m in mfiles if m in self]
            if not mfiles:
                return
            raise error.Abort(
                b"error: file '%s' cannot be written because "
                b" '%s/' is a directory in %s (containing %d "
                b"entries: %s)"
                % (path, path, self.p1(), len(mfiles), b', '.join(mfiles))
            )

    def write(self, path, data, flags=b'', **kwargs):
        if data is None:
            raise error.ProgrammingError(b"data must be non-None")
        self._auditconflicts(path)
        self._markdirty(
            path, exists=True, data=data, date=dateutil.makedate(), flags=flags
        )
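
    # Illustrative sketch (not part of the original module): writes are only
    # recorded in the cache and can be read back without touching the
    # filesystem. ``wctx`` here is a hypothetical overlay instance:
    #
    #   wctx = overlayworkingctx(repo)
    #   wctx.setbase(repo[b'.'])
    #   wctx.write(b'a.txt', b'new contents\n')
    #   assert wctx.isdirty(b'a.txt')
    #   assert wctx.data(b'a.txt') == b'new contents\n'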

    def setflags(self, path, l, x):
        flag = b''
        if l:
            flag = b'l'
        elif x:
            flag = b'x'
        self._markdirty(path, exists=True, date=dateutil.makedate(), flags=flag)

    def remove(self, path):
        self._markdirty(path, exists=False)

    def exists(self, path):
        """exists behaves like `lexists`, but needs to follow symlinks and
        return False if they are broken.
        """
        if self.isdirty(path):
            # If this path exists and is a symlink, "follow" it by calling
            # exists on the destination path.
            if (
                self._cache[path][b'exists']
                and b'l' in self._cache[path][b'flags']
            ):
                return self.exists(self._cache[path][b'data'].strip())
            else:
                return self._cache[path][b'exists']

        return self._existsinparent(path)

    def lexists(self, path):
        """lexists returns True if the path exists"""
        if self.isdirty(path):
            return self._cache[path][b'exists']

        return self._existsinparent(path)

    def size(self, path):
        if self.isdirty(path):
            if self._cache[path][b'exists']:
                return len(self._cache[path][b'data'])
            else:
                raise error.ProgrammingError(
                    b"No such file or directory: %s" % self._path
                )
        return self._wrappedctx[path].size()

    def tomemctx(
        self,
        text,
        branch=None,
        extra=None,
        date=None,
        parents=None,
        user=None,
        editor=None,
    ):
        """Converts this ``overlayworkingctx`` into a ``memctx`` ready to be
        committed.

        ``text`` is the commit message.
        ``parents`` (optional) are rev numbers.
        """
        # Default parents to the wrapped context's if not passed.
        if parents is None:
            parents = self._wrappedctx.parents()
            if len(parents) == 1:
                parents = (parents[0], None)

        # ``parents`` is passed as rev numbers; convert to ``commitctxs``.
        if parents[1] is None:
            parents = (self._repo[parents[0]], None)
        else:
            parents = (self._repo[parents[0]], self._repo[parents[1]])

        files = self.files()

        def getfile(repo, memctx, path):
            if self._cache[path][b'exists']:
                return memfilectx(
                    repo,
                    memctx,
                    path,
                    self._cache[path][b'data'],
                    b'l' in self._cache[path][b'flags'],
                    b'x' in self._cache[path][b'flags'],
                    self._cache[path][b'copied'],
                )
            else:
                # Returning None, but including the path in `files`, is
                # necessary for memctx to register a deletion.
                return None

        return memctx(
            self._repo,
            parents,
            text,
            files,
            getfile,
            date=date,
            extra=extra,
            user=user,
            branch=branch,
            editor=editor,
        )
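
    # A minimal sketch (not part of the original module) of turning the
    # in-memory state into a real commit; ``wctx`` is a hypothetical dirty
    # overlay and the message/user values are illustrative:
    #
    #   mctx = wctx.tomemctx(b'rebase intermediate commit',
    #                        user=b'someone <someone@example.com>')
    #   newnode = repo.commitctx(mctx)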

    def isdirty(self, path):
        return path in self._cache

    def isempty(self):
        # We need to discard any keys that are actually clean before the empty
        # commit check.
        self._compact()
        return len(self._cache) == 0

    def clean(self):
        self._cache = {}

    def _compact(self):
        """Removes keys from the cache that are actually clean, by comparing
        them with the underlying context.

        This can occur during the merge process, e.g. by passing --tool :local
        to resolve a conflict.
        """
        keys = []
        # This won't be perfect, but can help performance significantly when
        # using things like remotefilelog.
        scmutil.prefetchfiles(
            self.repo(),
            [self.p1().rev()],
            scmutil.matchfiles(self.repo(), self._cache.keys()),
        )

        for path in self._cache.keys():
            cache = self._cache[path]
            try:
                underlying = self._wrappedctx[path]
                if (
                    underlying.data() == cache[b'data']
                    and underlying.flags() == cache[b'flags']
                ):
                    keys.append(path)
            except error.ManifestLookupError:
                # Path not in the underlying manifest (created).
                continue

        for path in keys:
            del self._cache[path]
        return keys

    def _markdirty(
        self, path, exists, data=None, date=None, flags=b'', copied=None
    ):
        # data not provided, let's see if we already have some; if not, let's
        # grab it from our underlying context, so that we always have data if
        # the file is marked as existing.
        if exists and data is None:
            oldentry = self._cache.get(path) or {}
            data = oldentry.get(b'data')
            if data is None:
                data = self._wrappedctx[path].data()

        self._cache[path] = {
            b'exists': exists,
            b'data': data,
            b'date': date,
            b'flags': flags,
            b'copied': copied,
        }

    def filectx(self, path, filelog=None):
        return overlayworkingfilectx(
            self._repo, path, parent=self, filelog=filelog
        )
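
    # Illustrative usage (not part of the original module): per-file access
    # goes through the ``overlayworkingfilectx`` handles created here, which
    # forward everything to this context's cache; ``wctx`` is hypothetical:
    #
    #   fctx = wctx.filectx(b'a.txt')
    #   fctx.data()    # forwarded to wctx.data(b'a.txt')
    #   fctx.flags()   # forwarded to wctx.flags(b'a.txt')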


class overlayworkingfilectx(committablefilectx):
    """Wraps a ``workingfilectx`` but intercepts all writes into an in-memory
    cache, which can be flushed through later by calling ``flush()``."""

    def __init__(self, repo, path, filelog=None, parent=None):
        super(overlayworkingfilectx, self).__init__(repo, path, filelog, parent)
        self._repo = repo
        self._parent = parent
        self._path = path

    def cmp(self, fctx):
        return self.data() != fctx.data()

    def changectx(self):
        return self._parent

    def data(self):
        return self._parent.data(self._path)

    def date(self):
        return self._parent.filedate(self._path)

    def exists(self):
        return self.lexists()

    def lexists(self):
        return self._parent.exists(self._path)

    def copysource(self):
        return self._parent.copydata(self._path)

    def size(self):
        return self._parent.size(self._path)

    def markcopied(self, origin):
        self._parent.markcopied(self._path, origin)

    def audit(self):
        pass

    def flags(self):
        return self._parent.flags(self._path)

    def setflags(self, islink, isexec):
        return self._parent.setflags(self._path, islink, isexec)

    def write(self, data, flags, backgroundclose=False, **kwargs):
        return self._parent.write(self._path, data, flags, **kwargs)

    def remove(self, ignoremissing=False):
        return self._parent.remove(self._path)

    def clearunknown(self):
        pass


class workingcommitctx(workingctx):
    """A workingcommitctx object makes access to data related to
    the revision being committed convenient.

    This hides changes in the working directory, if they aren't
    committed in this context.
    """

    def __init__(
        self, repo, changes, text=b"", user=None, date=None, extra=None
    ):
        super(workingcommitctx, self).__init__(
            repo, text, user, date, extra, changes
        )

    def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
        """Return matched files only in ``self._status``

        Uncommitted files appear "clean" via this context, even if
        they aren't actually so in the working directory.
        """
        if clean:
            clean = [f for f in self._manifest if f not in self._changedset]
        else:
            clean = []
        return scmutil.status(
            [f for f in self._status.modified if match(f)],
            [f for f in self._status.added if match(f)],
            [f for f in self._status.removed if match(f)],
            [],
            [],
            [],
            clean,
        )

    @propertycache
    def _changedset(self):
        """Return the set of files changed in this context"""
        changed = set(self._status.modified)
        changed.update(self._status.added)
        changed.update(self._status.removed)
        return changed


def makecachingfilectxfn(func):
    """Create a filectxfn that caches based on the path.

    We can't use util.cachefunc because it uses all arguments as the cache
    key and this creates a cycle since the arguments include the repo and
    memctx.
    """
    cache = {}

    def getfilectx(repo, memctx, path):
        if path not in cache:
            cache[path] = func(repo, memctx, path)
        return cache[path]

    return getfilectx
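
# Illustrative sketch (not part of the original module): wrapping a
# filectxfn so repeated lookups of the same path hit the cache; ``myfn``
# and the repo/memctx values are hypothetical:
#
#   def myfn(repo, memctx, path):
#       return memfilectx(repo, memctx, path, b'contents of %s\n' % path)
#
#   cachedfn = makecachingfilectxfn(myfn)
#   cachedfn(repo, memctx, b'a.txt')  # calls myfn and caches the result
#   cachedfn(repo, memctx, b'a.txt')  # returns the memoized memfilectx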


def memfilefromctx(ctx):
    """Given a context, return a memfilectx for ctx[path]

    This is a convenience method for building a memctx based on another
    context.
    """

    def getfilectx(repo, memctx, path):
        fctx = ctx[path]
        copysource = fctx.copysource()
        return memfilectx(
            repo,
            memctx,
            path,
            fctx.data(),
            islink=fctx.islink(),
            isexec=fctx.isexec(),
            copysource=copysource,
        )

    return getfilectx
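
# Illustrative note (not part of the original module): because ``memctx``
# wraps any non-callable store with ``memfilefromctx``, a plain context can
# serve directly as the file store, e.g. (hypothetical values):
#
#   getfilectx = memfilefromctx(repo[b'tip'])
#   fctx = getfilectx(repo, memctx, b'a.txt')  # memfilectx with tip's data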


def memfilefrompatch(patchstore):
    """Given a patch (e.g. patchstore object) return a memfilectx

    This is a convenience method for building a memctx based on a patchstore.
    """

    def getfilectx(repo, memctx, path):
        data, mode, copysource = patchstore.getfile(path)
        if data is None:
            return None
        islink, isexec = mode
        return memfilectx(
            repo,
            memctx,
            path,
            data,
            islink=islink,
            isexec=isexec,
            copysource=copysource,
        )

    return getfilectx


class memctx(committablectx):
    """Use memctx to perform in-memory commits via localrepo.commitctx().

    Revision information is supplied at initialization time, while the
    related file data is made available through a callback mechanism.
    'repo' is the current localrepo, 'parents' is a sequence of two parent
    revision identifiers (pass None for every missing parent), 'text' is
    the commit message and 'files' lists the names of files touched by the
    revision (normalized and relative to repository root).

    filectxfn(repo, memctx, path) is a callable receiving the repository,
    the current memctx object and the normalized path of the requested
    file, relative to the repository root. It is fired by the commit
    function for every file in 'files', but the call order is undefined.
    If the file is available in the revision being committed (updated or
    added), filectxfn returns a memfilectx object. If the file was removed,
    filectxfn returns None for recent Mercurial. Moved files are
    represented by marking the source file removed and the new file added
    with copy information (see memfilectx).

    user receives the committer name and defaults to the current repository
    username, date is the commit date in any format supported by
    dateutil.parsedate() and defaults to the current date, extra is a
    dictionary of metadata or is left empty.
    """

    # Mercurial <= 3.1 expects the filectxfn to raise IOError for missing files.
    # Extensions that need to retain compatibility across Mercurial 3.1 can use
    # this field to determine what to do in filectxfn.
    _returnnoneformissingfiles = True

    def __init__(
        self,
        repo,
        parents,
        text,
        files,
        filectxfn,
        user=None,
        date=None,
        extra=None,
        branch=None,
        editor=False,
    ):
        super(memctx, self).__init__(
            repo, text, user, date, extra, branch=branch
        )
        self._rev = None
        self._node = None
        parents = [(p or nullid) for p in parents]
        p1, p2 = parents
        self._parents = [self._repo[p] for p in (p1, p2)]
        files = sorted(set(files))
        self._files = files
        self.substate = {}

        if isinstance(filectxfn, patch.filestore):
            filectxfn = memfilefrompatch(filectxfn)
        elif not callable(filectxfn):
            # if store is not callable, wrap it in a function
            filectxfn = memfilefromctx(filectxfn)

        # memoizing increases performance for e.g. vcs convert scenarios.
        self._filectxfn = makecachingfilectxfn(filectxfn)

        if editor:
            self._text = editor(self._repo, self, [])
            self._repo.savecommitmessage(self._text)

    def filectx(self, path, filelog=None):
        """get a file context from the working directory

        Returns None if file doesn't exist and should be removed."""
        return self._filectxfn(self._repo, self, path)

    def commit(self):
        """commit context to the repo"""
        return self._repo.commitctx(self)

    @propertycache
    def _manifest(self):
        """generate a manifest based on the return values of filectxfn"""

        # keep this simple for now; just worry about p1
        pctx = self._parents[0]
        man = pctx.manifest().copy()

        for f in self._status.modified:
            man[f] = modifiednodeid

        for f in self._status.added:
            man[f] = addednodeid

        for f in self._status.removed:
            if f in man:
                del man[f]

        return man

    @propertycache
    def _status(self):
        """Calculate exact status from ``files`` specified at construction"""
        man1 = self.p1().manifest()
        p2 = self._parents[1]
        # "1 < len(self._parents)" can't be used for checking
        # existence of the 2nd parent, because "memctx._parents" is
        # explicitly initialized by a list whose length is 2.
        if p2.node() != nullid:
            man2 = p2.manifest()
            managing = lambda f: f in man1 or f in man2
        else:
            managing = lambda f: f in man1

        modified, added, removed = [], [], []
        for f in self._files:
            if not managing(f):
                added.append(f)
            elif self[f]:
                modified.append(f)
            else:
                removed.append(f)

        return scmutil.status(modified, added, removed, [], [], [], [])


class memfilectx(committablefilectx):
    """memfilectx represents an in-memory file to commit.

    See memctx and committablefilectx for more details.
    """

    def __init__(
        self,
        repo,
        changectx,
        path,
        data,
        islink=False,
        isexec=False,
        copysource=None,
    ):
        """
        path is the normalized file path relative to repository root.
        data is the file content as a string.
        islink is True if the file is a symbolic link.
        isexec is True if the file is executable.
        copysource is the source file path if the current file was copied
        in the revision being committed, or None."""
        super(memfilectx, self).__init__(repo, path, None, changectx)
        self._data = data
        if islink:
            self._flags = b'l'
        elif isexec:
            self._flags = b'x'
        else:
            self._flags = b''
        self._copysource = copysource

    def copysource(self):
        return self._copysource

    def cmp(self, fctx):
        return self.data() != fctx.data()

    def data(self):
        return self._data

    def remove(self, ignoremissing=False):
        """wraps unlink for a repo's working directory"""
        # need to figure out what to do here
        del self._changectx[self._path]

    def write(self, data, flags, **kwargs):
        """wraps repo.wwrite"""
        self._data = data


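# Editor's note: a minimal usage sketch, not part of the original file. It
# assumes `repo` is a loaded localrepository; the path, contents, and commit
# message are hypothetical. The memctx signature used here matches the call
# made by debugbuilddag further below.
def _example_memctx_commit(repo):
    def filectxfn(repo, memctx_, path):
        # returning None here would mark `path` as removed in the commit
        return memfilectx(repo, memctx_, path, data=b'hello\n')

    ctx = memctx(
        repo,
        parents=(repo[b'.'].node(), None),
        text=b'example in-memory commit',
        files=[b'greeting.txt'],
        filectxfn=filectxfn,
        user=b'example user <user@example.com>',
    )
    # commit() hands the context to repo.commitctx(), as defined above
    return ctx.commit()

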
class metadataonlyctx(committablectx):
    """Like memctx but reusing the manifest of a different commit.
    Intended to be used by lightweight operations that are creating
    metadata-only changes.

    Revision information is supplied at initialization time. 'repo' is the
    current localrepo, 'ctx' is the original revision whose manifest we're
    reusing, 'parents' is a sequence of two parent revision identifiers
    (pass None for every missing parent), 'text' is the commit message.

    user receives the committer name and defaults to the current repository
    username, date is the commit date in any format supported by
    dateutil.parsedate() and defaults to the current date, extra is a
    dictionary of metadata or is left empty.
    """

    def __init__(
        self,
        repo,
        originalctx,
        parents=None,
        text=None,
        user=None,
        date=None,
        extra=None,
        editor=False,
    ):
        if text is None:
            text = originalctx.description()
        super(metadataonlyctx, self).__init__(repo, text, user, date, extra)
        self._rev = None
        self._node = None
        self._originalctx = originalctx
        self._manifestnode = originalctx.manifestnode()
        if parents is None:
            parents = originalctx.parents()
        else:
            parents = [repo[p] for p in parents if p is not None]
        parents = parents[:]
        while len(parents) < 2:
            parents.append(repo[nullid])
        p1, p2 = self._parents = parents

        # sanity check to ensure that the reused manifest parents are
        # manifests of our commit parents
        mp1, mp2 = self.manifestctx().parents
        if p1 != nullid and p1.manifestnode() != mp1:
            raise RuntimeError(
                r"can't reuse the manifest: its p1 "
                r"doesn't match the new ctx p1"
            )
        if p2 != nullid and p2.manifestnode() != mp2:
            raise RuntimeError(
                r"can't reuse the manifest: "
                r"its p2 doesn't match the new ctx p2"
            )

        self._files = originalctx.files()
        self.substate = {}

        if editor:
            self._text = editor(self._repo, self, [])
            self._repo.savecommitmessage(self._text)

    def manifestnode(self):
        return self._manifestnode

    @property
    def _manifestctx(self):
        return self._repo.manifestlog[self._manifestnode]

    def filectx(self, path, filelog=None):
        return self._originalctx.filectx(path, filelog=filelog)

    def commit(self):
        """commit context to the repo"""
        return self._repo.commitctx(self)

    @property
    def _manifest(self):
        return self._originalctx.manifest()

    @propertycache
    def _status(self):
        """Calculate exact status from ``files`` specified in the ``origctx``
        and parents manifests.
        """
        man1 = self.p1().manifest()
        p2 = self._parents[1]
        # "1 < len(self._parents)" can't be used for checking
        # existence of the 2nd parent, because "metadataonlyctx._parents" is
        # explicitly initialized by the list, of which length is 2.
        if p2.node() != nullid:
            man2 = p2.manifest()
            managing = lambda f: f in man1 or f in man2
        else:
            managing = lambda f: f in man1

        modified, added, removed = [], [], []
        for f in self._files:
            if not managing(f):
                added.append(f)
            elif f in self:
                modified.append(f)
            else:
                removed.append(f)

        return scmutil.status(modified, added, removed, [], [], [], [])


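# Editor's note: a minimal usage sketch, not part of the original file,
# showing the kind of metadata-only rewrite this class is intended for. The
# amended description is hypothetical; `repo` is a loaded localrepository.
def _example_metadata_rewrite(repo, rev):
    origctx = repo[rev]
    newctx = metadataonlyctx(
        repo,
        origctx,
        text=origctx.description() + b'\n(amended)',
        user=origctx.user(),
        date=origctx.date(),
        extra=origctx.extra(),
    )
    # the new node reuses origctx's manifest; only the metadata changes
    return newctx.commit()

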
class arbitraryfilectx(object):
    """Allows you to use filectx-like functions on a file in an arbitrary
    location on disk, possibly not in the working directory.
    """

    def __init__(self, path, repo=None):
        # Repo is optional because contrib/simplemerge uses this class.
        self._repo = repo
        self._path = path

    def cmp(self, fctx):
        # filecmp follows symlinks whereas `cmp` should not, so skip the fast
        # path if either side is a symlink.
        symlinks = b'l' in self.flags() or b'l' in fctx.flags()
        if not symlinks and isinstance(fctx, workingfilectx) and self._repo:
            # Add a fast-path for merge if both sides are disk-backed.
            # Note that filecmp uses the opposite return values (True if same)
            # from our cmp functions (True if different).
            return not filecmp.cmp(self.path(), self._repo.wjoin(fctx.path()))
        return self.data() != fctx.data()

    def path(self):
        return self._path

    def flags(self):
        return b''

    def data(self):
        return util.readfile(self._path)

    def decodeddata(self):
        with open(self._path, b"rb") as f:
            return f.read()

    def remove(self):
        util.unlink(self._path)

    def write(self, data, flags, **kwargs):
        assert not flags
        with open(self._path, b"wb") as f:
            f.write(data)
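
# Editor's note: a minimal usage sketch, not part of the original file,
# comparing an arbitrary on-disk file against a working-directory file.
# `diskpath` and `wdirfile` are hypothetical.
def _example_arbitrary_cmp(repo, diskpath, wdirfile):
    diskfctx = arbitraryfilectx(diskpath, repo=repo)
    wfctx = repo[None][wdirfile]  # workingfilectx for the same logical file
    # cmp() returns True when the contents differ (the opposite convention
    # of filecmp.cmp, as noted in the class above)
    return diskfctx.cmp(wfctx)
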
@@ -1,4285 +1,4285 @@
# debugcommands.py - command processing for debug* commands
#
# Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import codecs
import collections
import difflib
import errno
import operator
import os
import random
import re
import socket
import ssl
import stat
import string
import subprocess
import sys
import time

from .i18n import _
from .node import (
    bin,
    hex,
    nullhex,
    nullid,
    nullrev,
    short,
)
from .pycompat import (
    getattr,
    open,
)
from . import (
    bundle2,
    changegroup,
    cmdutil,
    color,
    context,
    copies,
    dagparser,
    encoding,
    error,
    exchange,
    extensions,
    filemerge,
    filesetlang,
    formatter,
    hg,
    httppeer,
    localrepo,
    lock as lockmod,
    logcmdutil,
    merge as mergemod,
    obsolete,
    obsutil,
    pathutil,
    phases,
    policy,
    pvec,
    pycompat,
    registrar,
    repair,
    revlog,
    revset,
    revsetlang,
    scmutil,
    setdiscovery,
    simplemerge,
    sshpeer,
    sslutil,
    streamclone,
    templater,
    treediscovery,
    upgrade,
    url as urlmod,
    util,
    vfs as vfsmod,
    wireprotoframing,
    wireprotoserver,
    wireprotov2peer,
)
from .utils import (
    cborutil,
    compression,
    dateutil,
    procutil,
    stringutil,
)

from .revlogutils import deltas as deltautil

release = lockmod.release

command = registrar.command()


@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    if len(args) == 3:
        index, rev1, rev2 = args
        r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = r.lookup
    elif len(args) == 2:
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    a = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))


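# Editor's note: a sketch, not part of the original file, of the same
# ancestor computation done against a loaded repository; the revision
# identifiers are hypothetical.
def _example_ancestor(repo, rev1=b'default', rev2=b'stable'):
    cl = repo.changelog
    a = cl.ancestor(repo.lookup(rev1), repo.lookup(rev2))
    return cl.rev(a), hex(a)

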
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    f = hg.openpath(ui, fname)
    gen = exchange.readbundle(ui, f, fname)
    gen.apply(repo)


@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1
        atbranch = b'default'
        nodeids = []
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))


def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))


def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        msg = b"%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = b"%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        fm = ui.formatter(b'debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()


def _debugphaseheads(ui, data, indent=0):
    """display phase heads contained in 'data'"""
    indent_string = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(indent_string)
            ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))


def _quasirepr(thing):
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        return b'{%s}' % (
            b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
        )
    return pycompat.bytestr(repr(thing))


def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)


@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            spec = exchange.getbundlespec(ui, f)
            ui.write(b'%s\n' % spec)
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)


@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    caps = peer.capabilities()
    ui.writenoi18n(b'Main capabilities:\n')
    for c in sorted(caps):
        ui.write(b'  %s\n' % c)
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.writenoi18n(b'Bundle2 capabilities:\n')
        for key, values in sorted(pycompat.iteritems(b2caps)):
            ui.write(b'  %s\n' % key)
            for v in values:
                ui.write(b'    %s\n' % v)


@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in b"nr" and f not in m1:
            ui.warn(_(b"%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in b"a" and f in m1:
            ui.warn(_(b"%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in b"m" and f not in m1 and f not in m2:
            ui.warn(
                _(b"%s in state %s, but not in either manifest\n") % (f, state)
            )
            errors += 1
    for f in m1:
        state = repo.dirstate[f]
        if state not in b"nrm":
            ui.warn(_(b"%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)


@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    else:
        return _debugdisplaycolor(ui)


def _debugdisplaycolor(ui):
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for k, v in ui.configitems(b'color'):
            if k.startswith(b'color.'):
                ui._styles[k] = k[6:]
            elif k.startswith(b'terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_(b'available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(b'%s\n' % colorname, label=label)


def _debugdisplaystyle(ui):
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            # 50
            ui.write(b': ')
            ui.write(b' ' * (max(0, width - len(label))))
            ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
        ui.write(b'\n')


@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))


@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = set((int(r) for r in revs))

        def events():
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
        ui.write(b"\n")


@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(r.rawdata(r.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)


@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        d = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        d = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % d)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
    if range:
        m = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % m(d[0]))


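# Editor's note: a sketch, not part of the original file, of the tuple that
# the "internal" line above prints. dateutil.parsedate() returns a
# (unixtime, offset) pair; the input below is hypothetical and the expected
# value is the editor's own calculation.
def _example_parsedate():
    d = dateutil.parsedate(b'2020-02-20 12:00 +0100')
    # d should be (1582196400, -3600): seconds since the epoch, plus the
    # timezone offset that datestr() uses to render the "standard" form
    return dateutil.datestr(d)

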
666 @command(
666 @command(
667 b'debugdeltachain',
667 b'debugdeltachain',
668 cmdutil.debugrevlogopts + cmdutil.formatteropts,
668 cmdutil.debugrevlogopts + cmdutil.formatteropts,
669 _(b'-c|-m|FILE'),
669 _(b'-c|-m|FILE'),
670 optionalrepo=True,
670 optionalrepo=True,
671 )
671 )
672 def debugdeltachain(ui, repo, file_=None, **opts):
672 def debugdeltachain(ui, repo, file_=None, **opts):
673 """dump information about delta chains in a revlog
673 """dump information about delta chains in a revlog
674
674
675 Output can be templatized. Available template keywords are:
675 Output can be templatized. Available template keywords are:
676
676
677 :``rev``: revision number
677 :``rev``: revision number
678 :``chainid``: delta chain identifier (numbered by unique base)
678 :``chainid``: delta chain identifier (numbered by unique base)
679 :``chainlen``: delta chain length to this revision
679 :``chainlen``: delta chain length to this revision
680 :``prevrev``: previous revision in delta chain
680 :``prevrev``: previous revision in delta chain
681 :``deltatype``: role of delta / how it was computed
681 :``deltatype``: role of delta / how it was computed
682 :``compsize``: compressed size of revision
682 :``compsize``: compressed size of revision
683 :``uncompsize``: uncompressed size of revision
683 :``uncompsize``: uncompressed size of revision
684 :``chainsize``: total size of compressed revisions in chain
684 :``chainsize``: total size of compressed revisions in chain
685 :``chainratio``: total chain size divided by uncompressed revision size
685 :``chainratio``: total chain size divided by uncompressed revision size
686 (new delta chains typically start at ratio 2.00)
686 (new delta chains typically start at ratio 2.00)
687 :``lindist``: linear distance from base revision in delta chain to end
687 :``lindist``: linear distance from base revision in delta chain to end
688 of this revision
688 of this revision
689 :``extradist``: total size of revisions not part of this delta chain from
689 :``extradist``: total size of revisions not part of this delta chain from
690 base of delta chain to end of this revision; a measurement
690 base of delta chain to end of this revision; a measurement
691 of how much extra data we need to read/seek across to read
691 of how much extra data we need to read/seek across to read
692 the delta chain for this revision
692 the delta chain for this revision
693 :``extraratio``: extradist divided by chainsize; another representation of
693 :``extraratio``: extradist divided by chainsize; another representation of
694 how much unrelated data is needed to load this delta chain
694 how much unrelated data is needed to load this delta chain
695
695
696 If the repository is configured to use the sparse read, additional keywords
696 If the repository is configured to use the sparse read, additional keywords
697 are available:
697 are available:
698
698
699 :``readsize``: total size of data read from the disk for a revision
699 :``readsize``: total size of data read from the disk for a revision
700 (sum of the sizes of all the blocks)
700 (sum of the sizes of all the blocks)
701 :``largestblock``: size of the largest block of data read from the disk
701 :``largestblock``: size of the largest block of data read from the disk
702 :``readdensity``: density of useful bytes in the data read from the disk
702 :``readdensity``: density of useful bytes in the data read from the disk
703 :``srchunks``: in how many data hunks the whole revision would be read
703 :``srchunks``: in how many data hunks the whole revision would be read
704
704
705 The sparse read can be enabled with experimental.sparse-read = True
705 The sparse read can be enabled with experimental.sparse-read = True
706 """
706 """
707 opts = pycompat.byteskwargs(opts)
707 opts = pycompat.byteskwargs(opts)
708 r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
708 r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
709 index = r.index
709 index = r.index
710 start = r.start
710 start = r.start
711 length = r.length
711 length = r.length
712 generaldelta = r.version & revlog.FLAG_GENERALDELTA
712 generaldelta = r.version & revlog.FLAG_GENERALDELTA
713 withsparseread = getattr(r, '_withsparseread', False)
713 withsparseread = getattr(r, '_withsparseread', False)
714
714
715 def revinfo(rev):
715 def revinfo(rev):
716 e = index[rev]
716 e = index[rev]
717 compsize = e[1]
717 compsize = e[1]
718 uncompsize = e[2]
718 uncompsize = e[2]
719 chainsize = 0
719 chainsize = 0
720
720
721 if generaldelta:
721 if generaldelta:
722 if e[3] == e[5]:
722 if e[3] == e[5]:
723 deltatype = b'p1'
723 deltatype = b'p1'
724 elif e[3] == e[6]:
724 elif e[3] == e[6]:
725 deltatype = b'p2'
725 deltatype = b'p2'
726 elif e[3] == rev - 1:
726 elif e[3] == rev - 1:
727 deltatype = b'prev'
727 deltatype = b'prev'
728 elif e[3] == rev:
728 elif e[3] == rev:
729 deltatype = b'base'
729 deltatype = b'base'
730 else:
                else:
                    deltatype = b'other'
            else:
                if e[3] == rev:
                    deltatype = b'base'
                else:
                    deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b'    rev  chain# chainlen     prev   delta       '
        b'size    rawsize  chainsize     ratio   lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b'   readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()

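# Editor's note: an illustrative sketch, not part of the original module. It
# restates the arithmetic behind the ratio columns printed above, with plain
# numbers so it can be exercised on its own; the helper name is hypothetical.
def _sketch_chainratios(chainsize, uncompsize, lineardist):
    # chainratio: total delta-chain size relative to the full text size;
    # values well below 1.0 mean the chain stores the revision compactly
    chainratio = float(chainsize) / float(uncompsize) if uncompsize else chainsize
    # extradist: bytes lying between the chain base and this revision on
    # disk that do not belong to the chain itself (interleaved other data)
    extradist = lineardist - chainsize
    extraratio = float(extradist) / float(chainsize) if chainsize else extradist
    return chainratio, extraratio
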
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:
        keyfunc = lambda x: (x[1][3], x[0])  # sort by mtime, then by filename
    else:
        keyfunc = None  # sort by filename
    for file_, ent in sorted(pycompat.iteritems(repo.dirstate), key=keyfunc):
        if ent[3] == -1:
            timestr = b'unset               '
        elif nodates:
            timestr = b'set                 '
        else:
            timestr = time.strftime(
                "%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
            )
            timestr = encoding.strtolocal(timestr)
        if ent[1] & 0o20000:
            mode = b'lnk'
        else:
            mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))

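# Editor's note: an illustrative sketch, not part of the original module. It
# restates how one dirstate entry tuple (state, mode, size, mtime) becomes a
# line of debugdirstate output; the helper name is hypothetical.
def _sketch_dirstateline(file_, ent):
    if ent[1] & 0o20000:  # stat.S_IFLNK bit set: the entry is a symlink
        mode = b'lnk'
    else:
        mode = b'%3o' % (ent[1] & 0o777)  # permission bits only
    if ent[3] == -1:
        timestr = b'unset'  # mtime deliberately invalidated
    else:
        timestr = b'%d' % ent[3]
    return b'%c %s %10d %s %s' % (ent[0], mode, ent[2], timestr, file_)
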
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed used for discovery'),
    ]
    + cmdutil.remoteopts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    if opts.get(b'old'):

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(
                ui, repo, remote, ancestorsof=nodes
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']
    with util.timedcm('debug-discovery') as t:
        common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    common = set(common)
    rheads = set(hds)
    lheads = set(repo.heads())

    data = {}
    data[b'elapsed'] = t.elapsed
    data[b'nb-common'] = len(common)
    data[b'nb-common-local'] = len(common & lheads)
    data[b'nb-common-remote'] = len(common & rheads)
    data[b'nb-common-both'] = len(common & rheads & lheads)
    data[b'nb-local'] = len(lheads)
    data[b'nb-local-missing'] = data[b'nb-local'] - data[b'nb-common-local']
    data[b'nb-remote'] = len(rheads)
    data[b'nb-remote-unknown'] = data[b'nb-remote'] - data[b'nb-common-remote']
    data[b'nb-revs'] = len(repo.revs(b'all()'))
    data[b'nb-revs-common'] = len(repo.revs(b'::%ln', common))
    data[b'nb-revs-missing'] = data[b'nb-revs'] - data[b'nb-revs-common']

    # display discovery summary
    ui.writenoi18n(b"elapsed time:  %(elapsed)f seconds\n" % data)
    ui.writenoi18n(b"heads summary:\n")
    ui.writenoi18n(b"  total common heads:  %(nb-common)9d\n" % data)
    ui.writenoi18n(b"    also local heads:  %(nb-common-local)9d\n" % data)
    ui.writenoi18n(b"    also remote heads: %(nb-common-remote)9d\n" % data)
    ui.writenoi18n(b"      both:            %(nb-common-both)9d\n" % data)
    ui.writenoi18n(b"  local heads:         %(nb-local)9d\n" % data)
    ui.writenoi18n(b"    common:            %(nb-common-local)9d\n" % data)
    ui.writenoi18n(b"    missing:           %(nb-local-missing)9d\n" % data)
    ui.writenoi18n(b"  remote heads:        %(nb-remote)9d\n" % data)
    ui.writenoi18n(b"    common:            %(nb-common-remote)9d\n" % data)
    ui.writenoi18n(b"    unknown:           %(nb-remote-unknown)9d\n" % data)
    ui.writenoi18n(b"local changesets:      %(nb-revs)9d\n" % data)
    ui.writenoi18n(b"  common:              %(nb-revs-common)9d\n" % data)
    ui.writenoi18n(b"  missing:             %(nb-revs-missing)9d\n" % data)

    if ui.verbose:
        ui.writenoi18n(
            b"common heads: %s\n" % b" ".join(sorted(short(n) for n in common))
        )

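# Editor's note: an illustrative sketch, not part of the original module. The
# summary above is plain set arithmetic over head sets; condensed:
def _sketch_discoverystats(common, lheads, rheads):
    return {
        b'nb-common': len(common),
        b'nb-common-local': len(common & lheads),
        b'nb-common-remote': len(common & rheads),
        b'nb-common-both': len(common & rheads & lheads),
        b'nb-local-missing': len(lheads) - len(common & lheads),
        b'nb-remote-unknown': len(rheads) - len(common & rheads),
    }
# For example, _sketch_discoverystats({1, 2}, {1, 3}, {2, 4}) reports one
# common head that is also local (1), one that is also remote (2), and none
# that is both.
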
_chunksize = 4 << 10


@command(
    b'debugdownload', [(b'o', b'output', b'', _(b'path')),], optionalrepo=True
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config"""
    fh = urlmod.open(ui, url, output)

    dest = ui
    if output:
        dest = open(output, b"wb", _chunksize)
    try:
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        if output:
            dest.close()

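# Editor's note: an illustrative sketch, not part of the original module. The
# download loop above is a standard fixed-size chunked copy; `_chunksize` is
# 4 << 10 == 4096 bytes (4 KiB). The same pattern in isolation:
def _sketch_copychunks(src, dst, chunksize=4 << 10):
    data = src.read(chunksize)
    while data:  # b'' at EOF is falsy, which ends the loop
        dst.write(data)
        data = src.read(chunksize)
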
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            fm.write(b'name', b'%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b'  location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b'  bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b'  tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b'  bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()

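# Editor's note: an illustrative sketch, not part of the original module. It
# condenses the labeling rule used above: an extension is silent when tested
# with the running Mercurial, "(untested!)" when it declares nothing, and
# otherwise tagged with the last version it claims to support.
def _sketch_testedlabel(hgver, exttestedwith, isinternal):
    if isinternal or hgver in exttestedwith:
        return b''
    if not exttestedwith:
        return b' (untested!)'
    return b' (%s!)' % exttestedwith[-1]
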
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)

    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = set(n for n, f in stages)

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)

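# Editor's note: an illustrative sketch, not part of the original module. The
# stage loop above is a left fold: each stage function receives the tree the
# previous stage produced, and a stage is printed only when requested.
def _sketch_runstages(tree, stages, showalways, showfn):
    # stages is a list of (name, transform) pairs, e.g. (b'parsed', identity),
    # (b'analyzed', analyze), (b'optimized', optimize)
    for name, transform in stages:
        tree = transform(tree)
        if name in showalways:
            showfn(name, tree)
    return tree
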
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        formatvalue = pycompat.identity

    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()

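# Editor's note: an illustrative sketch, not part of the original module. It
# isolates the labeling rule applied above to each format-variant row.
def _sketch_variantlabel(repovalue, configvalue, default):
    if repovalue != configvalue:
        # the repo disagrees with the user's configuration: an upgrade (or
        # downgrade) would change this variant
        return b'mismatchconfig'
    elif repovalue != default:
        # the repo only disagrees with Mercurial's built-in default
        return b'mismatchdefault'
    return b'uptodate'
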
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""
    ui.writenoi18n(b'path: %s\n' % path)
    ui.writenoi18n(
        b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
    )
    ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(
        b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
    )
    ui.writenoi18n(
        b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
    )
    casesensitive = b'(unknown)'
    try:
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)

@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = repo.getbundle(b'debug', **args)

    bundletype = opts.get(b'type', b'bzip2').lower()
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = btypes.get(bundletype)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)

@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != b'.':
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))

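# Editor's note: an illustrative sketch, not part of the original module. When
# a file itself is not ignored, the loop above asks the same question of each
# parent directory; pathutil.finddirs yields them from the deepest one up. A
# minimal stand-in for that traversal:
def _sketch_finddirs(path):
    pos = path.rfind(b'/')
    while pos != -1:
        yield path[:pos]  # b'a/b/c' -> b'a/b', then b'a'
        pos = path.rfind(b'/', 0, pos)
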
@command(
    b'debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    idlen = 12
    for i in store:
        idlen = len(shortfn(store.node(i)))
        break

    fm = ui.formatter(b'debugindex', opts)
    fm.plain(
        b'   rev linkrev %s %s p2\n'
        % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
    )

    for rev in store:
        node = store.node(rev)
        parents = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
        fm.write(b'node', b'%s ', shortfn(node))
        fm.write(b'p1', b'%s ', shortfn(parents[0]))
        fm.write(b'p2', b'%s', shortfn(parents[1]))
        fm.plain(b'\n')

    fm.end()

@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for i in r:
        node = r.node(i)
        pp = r.parents(node)
        ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
        if pp[1] != nullid:
            ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
    ui.write(b"}\n")

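# Editor's note: illustrative only, not part of the original module. For a
# small repository where rev 2 merges revs 0 and 1, the loop above would emit
# a graph shaped roughly like this (edges run parent -> child; the null
# parent of the root shows up as rev -1):
#
#   digraph G {
#       -1 -> 0
#       0 -> 1
#       1 -> 2
#       0 -> 2
#   }
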
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    repo.changelog.shortest(nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    for k, v in sorted(index.stats().items()):
        ui.write(b'%s: %d\n' % (k, v))

@command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
def debuginstall(ui, **opts):
    '''test Mercurial installation

    Returns 0 on success.
    '''
    opts = pycompat.byteskwargs(opts)

    problems = 0

    fm = ui.formatter(b'debuginstall', opts)
    fm.startitem()

    # encoding
    fm.write(b'encoding', _(b"checking encoding (%s)...\n"), encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(
        err,
        b'encodingerror',
        _(b" %s\n (check that your locale is properly set)\n"),
        err,
    )

    # Python
    pythonlib = None
    if util.safehasattr(os, '__file__'):
        pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
    elif getattr(sys, 'oxidized', False):
        pythonlib = pycompat.sysexecutable

    fm.write(
        b'pythonexe',
        _(b"checking Python executable (%s)\n"),
        pycompat.sysexecutable or _(b"unknown"),
    )
    fm.write(
        b'pythonver',
        _(b"checking Python version (%s)\n"),
        (b"%d.%d.%d" % sys.version_info[:3]),
    )
    fm.write(
        b'pythonlib',
        _(b"checking Python lib (%s)...\n"),
        pythonlib or _(b"unknown"),
    )

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add(b'sni')

    fm.write(
        b'pythonsecurity',
        _(b"checking Python security support (%s)\n"),
        fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
    )

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if b'tls1.2' not in security:
        fm.plain(
            _(
                b' TLS 1.2 not supported by Python install; '
                b'network connections lack modern security\n'
            )
        )
    if b'sni' not in security:
        fm.plain(
            _(
                b' SNI not supported by Python install; may have '
                b'connectivity issues with some servers\n'
            )
        )

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write(
        b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
    )
    fm.write(
        b'hgverextra',
        _(b"checking Mercurial custom build (%s)\n"),
        b'+'.join(hgver.split(b'+')[1:]),
    )

    # compiled modules
    hgmodules = None
    if util.safehasattr(sys.modules[__name__], '__file__'):
        hgmodules = os.path.dirname(pycompat.fsencode(__file__))
    elif getattr(sys, 'oxidized', False):
        hgmodules = pycompat.sysexecutable

    fm.write(
        b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
    )
    fm.write(
        b'hgmodules',
        _(b"checking installed modules (%s)...\n"),
        hgmodules or _(b"unknown"),
    )

    rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
    rustext = rustandc  # for now, that's the only case
    cext = policy.policy in (b'c', b'allow') or rustandc
    nopure = cext or rustext
    if nopure:
        err = None
        try:
            if cext:
                from .cext import (  # pytype: disable=import-error
                    base85,
                    bdiff,
                    mpatch,
                    osutil,
                )

                # quiet pyflakes
                dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
            if rustext:
                from .rustext import (  # pytype: disable=import-error
                    ancestor,
                    dirstate,
                )

                dir(ancestor), dir(dirstate)  # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, b'extensionserror', b" %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write(
        b'compengines',
        _(b'checking registered compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    fm.write(
        b'compenginesavail',
        _(b'checking available compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines if e.available()),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    wirecompengines = compression.compengines.supportedwireengines(
        compression.SERVERROLE
    )
    fm.write(
        b'compenginesserver',
        _(
            b'checking available compression engines '
            b'for wire protocol (%s)\n'
        ),
        fm.formatlist(
            [e.name() for e in wirecompengines if e.wireprotosupport()],
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    re2 = b'missing'
    if util._re2:
        re2 = b'available'
    fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatepaths()
    fm.write(b'templatedirs', b'checking templates (%s)...\n', b' '.join(p))
    fm.condwrite(not p, b'', _(b" no template directories found\n"))
    if p:
        m = templater.templatepath(b"map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                p = None
            fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
        else:
            p = None
        fm.condwrite(
            p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
        )
        fm.condwrite(
            not m,
            b'defaulttemplatenotfound',
            _(b" template '%s' not found\n"),
            b"default",
        )
    if not p:
        problems += 1
    fm.condwrite(
        not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
    )

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    fm.condwrite(
        not cmdpath and editor == b'vi',
        b'vinotfound',
        _(
            b" No commit editor set and can't find %s in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editor == b'vi' and editorbin,
    )
    fm.condwrite(
        not cmdpath and editor != b'vi',
        b'editornotfound',
        _(
            b" Can't find editor '%s' in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editorbin,
    )
    if not cmdpath and editor != b'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = stringutil.forcebytestr(e)
        problems += 1

    fm.condwrite(
        username, b'username', _(b"checking username (%s)\n"), username
    )
    fm.condwrite(
        err,
        b'usernameerror',
        _(
            b"checking username...\n %s\n"
            b" (specify a username in your configuration file)\n"
        ),
        err,
    )

    for name, mod in extensions.extensions():
        handler = getattr(mod, 'debuginstall', None)
        if handler is not None:
            problems += handler(ui, fm)

    fm.condwrite(not problems, b'', _(b"no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(
        problems,
        b'problems',
        _(b"%d problems detected, please check your install!\n"),
        problems,
    )
    fm.end()

    return problems

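# Editor's note: an illustrative sketch, not part of the original module. The
# two version lines above come from splitting util.version() on b'+': the
# part before the first b'+' is the release, the rest identifies a custom
# build.
def _sketch_splitversion(hgver):
    parts = hgver.split(b'+')
    # e.g. b'5.3+20-abcdef' -> (b'5.3', b'20-abcdef')
    return parts[0], b'+'.join(parts[1:])
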
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    flags = repo.known([bin(s) for s in ids])
    ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))

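# Editor's note: an illustrative sketch, not part of the original module. The
# output line above renders a list of booleans as a compact 0/1 string:
def _sketch_knownstring(flags):
    return b"".join([f and b"1" or b"0" for f in flags])
# _sketch_knownstring([True, False, True]) == b"101"
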
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    debugnamecomplete(ui, repo, *args)

@command(
    b'debuglocks',
    [
        (b'L', b'force-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so they should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    if opts.get('force_lock'):
        repo.svfs.unlink(b'lock')
    if opts.get('force_wlock'):
        repo.vfs.unlink(b'wlock')
    if opts.get('force_lock') or opts.get('force_wlock'):
        return 0

    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
1845 user or b'None',
1846 pid,
1846 pid,
1847 host,
1847 host,
1848 )
1848 )
1849 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
1849 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
1850 return 1
1850 return 1
1851 except OSError as e:
1851 except OSError as e:
1852 if e.errno != errno.ENOENT:
1852 if e.errno != errno.ENOENT:
1853 raise
1853 raise
1854
1854
1855 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
1855 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
1856 return 0
1856 return 0
1857
1857
1858 held += report(repo.svfs, b"lock", repo.lock)
1858 held += report(repo.svfs, b"lock", repo.lock)
1859 held += report(repo.vfs, b"wlock", repo.wlock)
1859 held += report(repo.vfs, b"wlock", repo.wlock)
1860
1860
1861 return held
1861 return held
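    # Illustrative output shapes, derived from report() above (the user, pid
    # and age shown are hypothetical):
    #
    #   $ hg debuglocks
    #   lock:  free
    #   wlock: user alice, process 12345 (9s)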


@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
            return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(e, hint=b"Check your manifest node id")
                manifest.read()  # stores revision in cache too
            return

    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.peek to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
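    # Illustrative output (ids and sizes are hypothetical; the real output
    # prints full 40-hex node ids):
    #
    #   $ hg debugmanifestfulltextcache
    #   cache contains 2 manifest entries, in order of most to least recent:
    #   id: 1234abcd..., size 4.0 KB
    #   id: 5678ef90..., size 2.5 KB
    #   total cache data size 6.5 KB, on-disk 6.5 KB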


@command(b'debugmergestate', [], b'')
def debugmergestate(ui, repo, *args):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    def _hashornull(h):
        if h == nullhex:
            return b'null'
        else:
            return h

    def printrecords(version):
        ui.writenoi18n(b'* version %d records\n' % version)
        if version == 1:
            records = v1records
        else:
            records = v2records

        for rtype, record in records:
            # pretty print some record types
            if rtype == b'L':
                ui.writenoi18n(b'local: %s\n' % record)
            elif rtype == b'O':
                ui.writenoi18n(b'other: %s\n' % record)
            elif rtype == b'm':
                driver, mdstate = record.split(b'\0', 1)
                ui.writenoi18n(
                    b'merge driver: %s (state "%s")\n' % (driver, mdstate)
                )
            elif rtype in b'FDC':
                r = record.split(b'\0')
                f, state, hash, lfile, afile, anode, ofile = r[0:7]
                if version == 1:
                    onode = b'not stored in v1 format'
                    flags = r[7]
                else:
                    onode, flags = r[7:9]
                ui.writenoi18n(
                    b'file: %s (record type "%s", state "%s", hash %s)\n'
                    % (f, rtype, state, _hashornull(hash))
                )
                ui.writenoi18n(
                    b'  local path: %s (flags "%s")\n' % (lfile, flags)
                )
                ui.writenoi18n(
                    b'  ancestor path: %s (node %s)\n'
                    % (afile, _hashornull(anode))
                )
                ui.writenoi18n(
                    b'  other path: %s (node %s)\n'
                    % (ofile, _hashornull(onode))
                )
            elif rtype == b'f':
                filename, rawextras = record.split(b'\0', 1)
                extras = rawextras.split(b'\0')
                i = 0
                extrastrings = []
                while i < len(extras):
                    extrastrings.append(b'%s = %s' % (extras[i], extras[i + 1]))
                    i += 2

                ui.writenoi18n(
                    b'file extras: %s (%s)\n'
                    % (filename, b', '.join(extrastrings))
                )
            elif rtype == b'l':
                labels = record.split(b'\0', 2)
                labels = [l for l in labels if len(l) > 0]
                ui.writenoi18n(b'labels:\n')
                ui.write((b'  local: %s\n' % labels[0]))
                ui.write((b'  other: %s\n' % labels[1]))
                if len(labels) > 2:
                    ui.write((b'  base:  %s\n' % labels[2]))
            else:
                ui.writenoi18n(
                    b'unrecognized entry: %s\t%s\n'
                    % (rtype, record.replace(b'\0', b'\t'))
                )

    # Avoid mergestate.read() since it may raise an exception for unsupported
    # merge state records. We shouldn't be doing this, but this is OK since this
    # command is pretty low-level.
    ms = mergemod.mergestate(repo)

    # sort so that reasonable information is on top
    v1records = ms._readrecordsv1()
    v2records = ms._readrecordsv2()
    order = b'LOml'

    def key(r):
        idx = order.find(r[0])
        if idx == -1:
            return (1, r[1])
        else:
            return (0, idx)

    v1records.sort(key=key)
    v2records.sort(key=key)

    if not v1records and not v2records:
        ui.writenoi18n(b'no merge state found\n')
    elif not v2records:
        ui.notenoi18n(b'no version 2 merge state\n')
        printrecords(1)
    elif ms._v1v2match(v1records, v2records):
        ui.notenoi18n(b'v1 and v2 states match: using v2\n')
        printrecords(2)
    else:
        ui.notenoi18n(b'v1 and v2 states mismatch: using v1\n')
        printrecords(1)
        if ui.verbose:
            printrecords(2)
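    # Illustrative output for a single unresolved file in v2 merge state
    # (nodes abbreviated; the real output uses full 40-hex hashes):
    #
    #   $ hg debugmergestate
    #   * version 2 records
    #   local: 252023...
    #   other: 6a2aee...
    #   file: foo.c (record type "F", state "u", hash 5f1f31...)
    #     local path: foo.c (flags "")
    #     ancestor path: foo.c (node 9d5af5...)
    #     other path: foo.c (node e3f482...)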


@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    names = set()
    # since we previously only listed open branches, we will handle that
    # specially (after this for loop)
    for name, ns in pycompat.iteritems(repo.names):
        if name != b'branches':
            names.update(ns.listnames(repo))
    names.update(
        tag
        for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
        if not closed
    )
    completions = set()
    if not args:
        args = [b'']
    for a in args:
        completions.update(n for n in names if n.startswith(a))
    ui.write(b'\n'.join(sorted(completions)))
    ui.write(b'\n')
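    # Illustrative usage (hypothetical names): completing the prefix "v1"
    # prints every matching tag, bookmark or open branch name, one per line:
    #
    #   $ hg debugnamecomplete v1
    #   v1.0
    #   v1.1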


@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.Abort(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    if opts.get(b'delete'):
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.Abort(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of a transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        if opts[b'rev']:
            raise error.Abort(b'cannot select revision when creating marker')
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot use --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % pycompat.bytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        if opts[b'rev']:
            revs = scmutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers). This can happen
                # if both --index and --rev options are provided, since we
                # then need to iterate over all of the markers to get the
                # correct indices, but only display the ones that are
                # relevant to the --rev value.
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
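    # Illustrative usage (OLD and NEW stand for hypothetical full 40-hex
    # node ids): create a marker obsoleting OLD in favor of NEW, then list
    # all markers:
    #
    #   $ hg debugobsolete OLD NEW
    #   $ hg debugobsolete
    #   OLD NEW 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'alice'}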


@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    for dst, src in ctx.p1copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))


@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
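    # Illustrative output for debugp1copies/debugp2copies (hypothetical
    # paths): each recorded copy is printed as "source -> destination":
    #
    #   $ hg debugp1copies -r .
    #   old.py -> new.py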


@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        spec = spec[len(rootdir) :]
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in pycompat.iteritems(dirstate):
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
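    # Illustrative behaviour (hypothetical tracked files "src/a.c" and
    # "src/b.c"): completion stops at the next path segment unless --full
    # is given:
    #
    #   $ hg debugpathcomplete s
    #   src
    #   $ hg debugpathcomplete --full s
    #   src/a.c
    #   src/b.c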


@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    ctx1 = scmutil.revsingle(repo, rev1)
    ctx2 = scmutil.revsingle(repo, rev2)
    m = scmutil.match(ctx1, pats, opts)
    for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
        ui.write(b'%s -> %s\n' % (src, dst))
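    # Illustrative usage (hypothetical revisions and paths): show copies
    # performed between two revisions:
    #
    #   $ hg debugpathcopies 1 2
    #   old.py -> new.py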


@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # it, though.
    overrides = {
        (b'devel', b'debug.peer-request'): True,
    }

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

        local = peer.local() is not None
        canpush = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
        ui.write(_(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no')))
2358
2358
2359 @command(
2359 @command(
2360 b'debugpickmergetool',
2360 b'debugpickmergetool',
2361 [
2361 [
2362 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2362 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2363 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2363 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2364 ]
2364 ]
2365 + cmdutil.walkopts
2365 + cmdutil.walkopts
2366 + cmdutil.mergetoolopts,
2366 + cmdutil.mergetoolopts,
2367 _(b'[PATTERN]...'),
2367 _(b'[PATTERN]...'),
2368 inferrepo=True,
2368 inferrepo=True,
2369 )
2369 )
2370 def debugpickmergetool(ui, repo, *pats, **opts):
2370 def debugpickmergetool(ui, repo, *pats, **opts):
2371 """examine which merge tool is chosen for specified file
2371 """examine which merge tool is chosen for specified file
2372
2372
2373 As described in :hg:`help merge-tools`, Mercurial examines
2373 As described in :hg:`help merge-tools`, Mercurial examines
2374 configurations below in this order to decide which merge tool is
2374 configurations below in this order to decide which merge tool is
2375 chosen for specified file.
2375 chosen for specified file.
2376
2376
2377 1. ``--tool`` option
2377 1. ``--tool`` option
2378 2. ``HGMERGE`` environment variable
2378 2. ``HGMERGE`` environment variable
2379 3. configurations in ``merge-patterns`` section
2379 3. configurations in ``merge-patterns`` section
2380 4. configuration of ``ui.merge``
2380 4. configuration of ``ui.merge``
2381 5. configurations in ``merge-tools`` section
2381 5. configurations in ``merge-tools`` section
2382 6. ``hgmerge`` tool (for historical reason only)
2382 6. ``hgmerge`` tool (for historical reason only)
2383 7. default tool for fallback (``:merge`` or ``:prompt``)
2383 7. default tool for fallback (``:merge`` or ``:prompt``)
2384
2384
2385 This command writes out examination result in the style below::
2385 This command writes out examination result in the style below::
2386
2386
2387 FILE = MERGETOOL
2387 FILE = MERGETOOL
2388
2388
2389 By default, all files known in the first parent context of the
2389 By default, all files known in the first parent context of the
2390 working directory are examined. Use file patterns and/or -I/-X
2390 working directory are examined. Use file patterns and/or -I/-X
2391 options to limit target files. -r/--rev is also useful to examine
2391 options to limit target files. -r/--rev is also useful to examine
2392 files in another context without actual updating to it.
2392 files in another context without actual updating to it.
2393
2393
2394 With --debug, this command shows warning messages while matching
2394 With --debug, this command shows warning messages while matching
2395 against ``merge-patterns`` and so on, too. It is recommended to
2395 against ``merge-patterns`` and so on, too. It is recommended to
2396 use this option with explicit file patterns and/or -I/-X options,
2396 use this option with explicit file patterns and/or -I/-X options,
2397 because this option increases amount of output per file according
2397 because this option increases amount of output per file according
2398 to configurations in hgrc.
2398 to configurations in hgrc.
2399
2399
2400 With -v/--verbose, this command shows configurations below at
2400 With -v/--verbose, this command shows configurations below at
2401 first (only if specified).
2401 first (only if specified).
2402
2402
2403 - ``--tool`` option
2403 - ``--tool`` option
2404 - ``HGMERGE`` environment variable
2404 - ``HGMERGE`` environment variable
2405 - configuration of ``ui.merge``
2405 - configuration of ``ui.merge``
2406
2406
2407 If merge tool is chosen before matching against
2407 If merge tool is chosen before matching against
2408 ``merge-patterns``, this command can't show any helpful
2408 ``merge-patterns``, this command can't show any helpful
2409 information, even with --debug. In such case, information above is
2409 information, even with --debug. In such case, information above is
2410 useful to know why a merge tool is chosen.
2410 useful to know why a merge tool is chosen.
2411 """
2411 """
2412 opts = pycompat.byteskwargs(opts)
2412 opts = pycompat.byteskwargs(opts)
2413 overrides = {}
2413 overrides = {}
2414 if opts[b'tool']:
2414 if opts[b'tool']:
2415 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
2415 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
2416 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
2416 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
2417
2417
2418 with ui.configoverride(overrides, b'debugmergepatterns'):
2418 with ui.configoverride(overrides, b'debugmergepatterns'):
2419 hgmerge = encoding.environ.get(b"HGMERGE")
2419 hgmerge = encoding.environ.get(b"HGMERGE")
2420 if hgmerge is not None:
2420 if hgmerge is not None:
2421 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
2421 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
2422 uimerge = ui.config(b"ui", b"merge")
2422 uimerge = ui.config(b"ui", b"merge")
2423 if uimerge:
2423 if uimerge:
2424 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
2424 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
2425
2425
2426 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2426 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2427 m = scmutil.match(ctx, pats, opts)
2427 m = scmutil.match(ctx, pats, opts)
2428 changedelete = opts[b'changedelete']
2428 changedelete = opts[b'changedelete']
2429 for path in ctx.walk(m):
2429 for path in ctx.walk(m):
2430 fctx = ctx[path]
2430 fctx = ctx[path]
2431 try:
2431 try:
2432 if not ui.debugflag:
2432 if not ui.debugflag:
2433 ui.pushbuffer(error=True)
2433 ui.pushbuffer(error=True)
2434 tool, toolpath = filemerge._picktool(
2434 tool, toolpath = filemerge._picktool(
2435 repo,
2435 repo,
2436 ui,
2436 ui,
2437 path,
2437 path,
2438 fctx.isbinary(),
2438 fctx.isbinary(),
2439 b'l' in fctx.flags(),
2439 b'l' in fctx.flags(),
2440 changedelete,
2440 changedelete,
2441 )
2441 )
2442 finally:
2442 finally:
2443 if not ui.debugflag:
2443 if not ui.debugflag:
2444 ui.popbuffer()
2444 ui.popbuffer()
2445 ui.write(b'%s = %s\n' % (path, tool))
2445 ui.write(b'%s = %s\n' % (path, tool))
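    # Illustrative output (hypothetical file; with nothing configured, an
    # internal fallback tool is typically picked):
    #
    #   $ hg debugpickmergetool foo.c
    #   foo.c = :merge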


@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    target = hg.peer(ui, {}, repopath)
    if keyinfo:
        key, old, new = keyinfo
        with target.commandexecutor() as e:
            r = e.callcommand(
                b'pushkey',
                {
                    b'namespace': namespace,
                    b'key': key,
                    b'old': old,
                    b'new': new,
                },
            ).result()

        ui.status(pycompat.bytestr(r) + b'\n')
        return not r
    else:
        for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))):
            ui.write(
                b"%s\t%s\n" % (stringutil.escapestr(k), stringutil.escapestr(v))
            )
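    # Illustrative usage (hypothetical peer, bookmark and node ids): list a
    # namespace (keys and values are tab-separated), then move a key from an
    # old value to a new one:
    #
    #   $ hg debugpushkey REPO bookmarks
    #   mybook  <old-40-hex-id>
    #   $ hg debugpushkey REPO bookmarks mybook <old-40-hex-id> <new-40-hex-id>
    #   True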


@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    # undocumented developer aid: compare the parent vectors ("pvecs")
    # of two revisions
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )


@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look for the given revision

    If no revision is specified, the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            manifestonly = manifestfiles - dirstatefiles
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = set(f for f in dsonly if dirstate[f] != b'a')
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)


@command(b'debugrebuildfncache', [], b'')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    repair.rebuildfncache(ui, repo)
2563
2563
2564 @command(
2564 @command(
2565 b'debugrename',
2565 b'debugrename',
2566 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2566 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2567 _(b'[-r REV] [FILE]...'),
2567 _(b'[-r REV] [FILE]...'),
2568 )
2568 )
2569 def debugrename(ui, repo, *pats, **opts):
2569 def debugrename(ui, repo, *pats, **opts):
2570 """dump rename information"""
2570 """dump rename information"""
2571
2571
2572 opts = pycompat.byteskwargs(opts)
2572 opts = pycompat.byteskwargs(opts)
2573 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2573 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2574 m = scmutil.match(ctx, pats, opts)
2574 m = scmutil.match(ctx, pats, opts)
2575 for abs in ctx.walk(m):
2575 for abs in ctx.walk(m):
2576 fctx = ctx[abs]
2576 fctx = ctx[abs]
2577 o = fctx.filelog().renamed(fctx.filenode())
2577 o = fctx.filelog().renamed(fctx.filenode())
2578 rel = repo.pathto(abs)
2578 rel = repo.pathto(abs)
2579 if o:
2579 if o:
2580 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2580 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2581 else:
2581 else:
2582 ui.write(_(b"%s not renamed\n") % rel)
2582 ui.write(_(b"%s not renamed\n") % rel)


@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)

    if opts.get(b"dump"):
        numrevs = len(r)
        ui.write(
            (
                b"# rev p1rev p2rev start   end deltastart base   p1   p2"
                b" rawsize totalsize compression heads chainlen\n"
            )
        )
        ts = 0
        heads = set()

        for rev in pycompat.xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                # ts is the cumulative raw size and r.end(rev) the cumulative
                # stored size, so this is a running compression ratio
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write(
                b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                b"%11d %5d %8d\n"
                % (
                    rev,
                    p1,
                    p2,
                    r.start(rev),
                    r.end(rev),
                    r.start(dbase),
                    r.start(cbase),
                    r.start(p1),
                    r.start(p2),
                    rs,
                    ts,
                    compression,
                    len(heads),
                    clen,
                )
            )
        return 0

    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append(b'inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append(b'generaldelta')
    if not flags:
        flags = [b'(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are built
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about the delta chain of each rev
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # l is a [min, max, total] accumulator
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size
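    # Worked example (illustrative): starting from l = [None, 0, 0],
    # addsize(5, l) leaves l == [5, 5, 5], and a subsequent addsize(3, l)
    # leaves l == [3, 5, 8] -- i.e. the minimum, maximum and running total.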
2703
2703
2704 numrevs = len(r)
2704 numrevs = len(r)
2705 for rev in pycompat.xrange(numrevs):
2705 for rev in pycompat.xrange(numrevs):
2706 p1, p2 = r.parentrevs(rev)
2706 p1, p2 = r.parentrevs(rev)
2707 delta = r.deltaparent(rev)
2707 delta = r.deltaparent(rev)
2708 if format > 0:
2708 if format > 0:
2709 addsize(r.rawsize(rev), datasize)
2709 addsize(r.rawsize(rev), datasize)
2710 if p2 != nullrev:
2710 if p2 != nullrev:
2711 nummerges += 1
2711 nummerges += 1
2712 size = r.length(rev)
2712 size = r.length(rev)
2713 if delta == nullrev:
2713 if delta == nullrev:
2714 chainlengths.append(0)
2714 chainlengths.append(0)
2715 chainbases.append(r.start(rev))
2715 chainbases.append(r.start(rev))
2716 chainspans.append(size)
2716 chainspans.append(size)
2717 if size == 0:
2717 if size == 0:
2718 numempty += 1
2718 numempty += 1
2719 numemptytext += 1
2719 numemptytext += 1
2720 else:
2720 else:
2721 numfull += 1
2721 numfull += 1
2722 numsnapdepth[0] += 1
2722 numsnapdepth[0] += 1
2723 addsize(size, fullsize)
2723 addsize(size, fullsize)
2724 addsize(size, snapsizedepth[0])
2724 addsize(size, snapsizedepth[0])
2725 else:
2725 else:
2726 chainlengths.append(chainlengths[delta] + 1)
2726 chainlengths.append(chainlengths[delta] + 1)
2727 baseaddr = chainbases[delta]
2727 baseaddr = chainbases[delta]
2728 revaddr = r.start(rev)
2728 revaddr = r.start(rev)
2729 chainbases.append(baseaddr)
2729 chainbases.append(baseaddr)
2730 chainspans.append((revaddr - baseaddr) + size)
2730 chainspans.append((revaddr - baseaddr) + size)
2731 if size == 0:
2731 if size == 0:
2732 numempty += 1
2732 numempty += 1
2733 numemptydelta += 1
2733 numemptydelta += 1
2734 elif r.issnapshot(rev):
2734 elif r.issnapshot(rev):
2735 addsize(size, semisize)
2735 addsize(size, semisize)
2736 numsemi += 1
2736 numsemi += 1
2737 depth = r.snapshotdepth(rev)
2737 depth = r.snapshotdepth(rev)
2738 numsnapdepth[depth] += 1
2738 numsnapdepth[depth] += 1
2739 addsize(size, snapsizedepth[depth])
2739 addsize(size, snapsizedepth[depth])
2740 else:
2740 else:
2741 addsize(size, deltasize)
2741 addsize(size, deltasize)
2742 if delta == rev - 1:
2742 if delta == rev - 1:
2743 numprev += 1
2743 numprev += 1
2744 if delta == p1:
2744 if delta == p1:
2745 nump1prev += 1
2745 nump1prev += 1
2746 elif delta == p2:
2746 elif delta == p2:
2747 nump2prev += 1
2747 nump2prev += 1
2748 elif delta == p1:
2748 elif delta == p1:
2749 nump1 += 1
2749 nump1 += 1
2750 elif delta == p2:
2750 elif delta == p2:
2751 nump2 += 1
2751 nump2 += 1
2752 elif delta != nullrev:
2752 elif delta != nullrev:
2753 numother += 1
2753 numother += 1
2754
2754
2755 # Obtain data on the raw chunks in the revlog.
2755 # Obtain data on the raw chunks in the revlog.
2756 if util.safehasattr(r, b'_getsegmentforrevs'):
2756 if util.safehasattr(r, b'_getsegmentforrevs'):
2757 segment = r._getsegmentforrevs(rev, rev)[1]
2757 segment = r._getsegmentforrevs(rev, rev)[1]
2758 else:
2758 else:
2759 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2759 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2760 if segment:
2760 if segment:
2761 chunktype = bytes(segment[0:1])
2761 chunktype = bytes(segment[0:1])
2762 else:
2762 else:
2763 chunktype = b'empty'
2763 chunktype = b'empty'
2764
2764
2765 if chunktype not in chunktypecounts:
2765 if chunktype not in chunktypecounts:
2766 chunktypecounts[chunktype] = 0
2766 chunktypecounts[chunktype] = 0
2767 chunktypesizes[chunktype] = 0
2767 chunktypesizes[chunktype] = 0
2768
2768
2769 chunktypecounts[chunktype] += 1
2769 chunktypecounts[chunktype] += 1
2770 chunktypesizes[chunktype] += size
2770 chunktypesizes[chunktype] += size
2771
2771
2772 # Adjust size min value for empty cases
2772 # Adjust size min value for empty cases
2773 for size in (datasize, fullsize, semisize, deltasize):
2773 for size in (datasize, fullsize, semisize, deltasize):
2774 if size[0] is None:
2774 if size[0] is None:
2775 size[0] = 0
2775 size[0] = 0
2776
2776
2777 numdeltas = numrevs - numfull - numempty - numsemi
2777 numdeltas = numrevs - numfull - numempty - numsemi
2778 numoprev = numprev - nump1prev - nump2prev
2778 numoprev = numprev - nump1prev - nump2prev
2779 totalrawsize = datasize[2]
2779 totalrawsize = datasize[2]
2780 datasize[2] /= numrevs
2780 datasize[2] /= numrevs
2781 fulltotal = fullsize[2]
2781 fulltotal = fullsize[2]
2782 if numfull == 0:
2782 if numfull == 0:
2783 fullsize[2] = 0
2783 fullsize[2] = 0
2784 else:
2784 else:
2785 fullsize[2] /= numfull
2785 fullsize[2] /= numfull
2786 semitotal = semisize[2]
2786 semitotal = semisize[2]
2787 snaptotal = {}
2787 snaptotal = {}
2788 if numsemi > 0:
2788 if numsemi > 0:
2789 semisize[2] /= numsemi
2789 semisize[2] /= numsemi
2790 for depth in snapsizedepth:
2790 for depth in snapsizedepth:
2791 snaptotal[depth] = snapsizedepth[depth][2]
2791 snaptotal[depth] = snapsizedepth[depth][2]
2792 snapsizedepth[depth][2] /= numsnapdepth[depth]
2792 snapsizedepth[depth][2] /= numsnapdepth[depth]
2793
2793
2794 deltatotal = deltasize[2]
2794 deltatotal = deltasize[2]
2795 if numdeltas > 0:
2795 if numdeltas > 0:
2796 deltasize[2] /= numdeltas
2796 deltasize[2] /= numdeltas
2797 totalsize = fulltotal + semitotal + deltatotal
2797 totalsize = fulltotal + semitotal + deltatotal
2798 avgchainlen = sum(chainlengths) / numrevs
2798 avgchainlen = sum(chainlengths) / numrevs
2799 maxchainlen = max(chainlengths)
2799 maxchainlen = max(chainlengths)
2800 maxchainspan = max(chainspans)
2800 maxchainspan = max(chainspans)
2801 compratio = 1
2801 compratio = 1
2802 if totalsize:
2802 if totalsize:
2803 compratio = totalrawsize / totalsize
2803 compratio = totalrawsize / totalsize
2804
2804
2805 basedfmtstr = b'%%%dd\n'
2805 basedfmtstr = b'%%%dd\n'
2806 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
2806 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
2807
2807
2808 def dfmtstr(max):
2808 def dfmtstr(max):
2809 return basedfmtstr % len(str(max))
2809 return basedfmtstr % len(str(max))
2810
2810
2811 def pcfmtstr(max, padding=0):
2811 def pcfmtstr(max, padding=0):
2812 return basepcfmtstr % (len(str(max)), b' ' * padding)
2812 return basepcfmtstr % (len(str(max)), b' ' * padding)
2813
2813
2814 def pcfmt(value, total):
2814 def pcfmt(value, total):
2815 if total:
2815 if total:
2816 return (value, 100 * float(value) / total)
2816 return (value, 100 * float(value) / total)
2817 else:
2817 else:
2818 return value, 100.0
2818 return value, 100.0
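
    # For example (illustrative values only): with totalsize == 950,
    # dfmtstr(950) builds b'%3d\n' and pcfmtstr(950) builds
    # b'%3d (%5.2f%%)\n', so pcfmt(190, 950) == (190, 20.0) renders as
    # b'190 (20.00%)\n', keeping every column right-aligned.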

    ui.writenoi18n(b'format : %d\n' % format)
    ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))

    ui.write(b'\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
    ui.writenoi18n(
        b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
    )
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
    ui.writenoi18n(
        b' text : '
        + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' delta : '
        + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(numsnapdepth[depth], numrevs)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
    ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(snaptotal[depth], totalsize)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        if chunktype == b'empty':
            return b' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return b' 0x%s : ' % hex(chunktype)

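    # Illustrative note: revlog chunks typically begin with b'u' (stored
    # plain) or b'x' (zlib, whose streams start with 0x78); both are ASCII
    # letters, so the label echoes them, e.g. fmtchunktype(b'x') returns
    # b' 0x78 (x) : '. A non-letter marker (such as the zstd frame byte
    # 0x28, if that engine is in use) falls through to the bare-hex form,
    # b' 0x28 : '.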
    ui.write(b'\n')
    ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write(b'\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
    ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
    ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
    ui.writenoi18n(b'compression ratio : ' + fmt % compratio)

    if format > 0:
        ui.write(b'\n')
        ui.writenoi18n(
            b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
            % tuple(datasize)
        )
        ui.writenoi18n(
            b'full revision size (min/max/avg) : %d / %d / %d\n'
            % tuple(fullsize)
        )
        ui.writenoi18n(
            b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
            % tuple(semisize)
        )
        for depth in sorted(snapsizedepth):
            if depth == 0:
                continue
            ui.writenoi18n(
                b' level-%-3d (min/max/avg) : %d / %d / %d\n'
                % ((depth,) + tuple(snapsizedepth[depth]))
            )
        ui.writenoi18n(
            b'delta size (min/max/avg) : %d / %d / %d\n'
            % tuple(deltasize)
        )

    if numdeltas > 0:
        ui.write(b'\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.writenoi18n(
            b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
        )
        if numprev > 0:
            ui.writenoi18n(
                b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
            )
            ui.writenoi18n(
                b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
            )
            ui.writenoi18n(
                b' other : ' + fmt2 % pcfmt(numoprev, numprev)
            )
        if gdelta:
            ui.writenoi18n(
                b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
            )


@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b" rev offset length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b" rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b" rev flag offset length size link p1"
                    b" p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b" rev flag size link p1 p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                pp = [nullid, nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
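

# Example (illustrative): with format 0 and no --verbose, a hypothetical
# `hg debugrevlogindex -m` prints one row per manifest revision under the
# header written above, along the lines of
#
#    rev linkrev nodeid p1 p2
#      0       0 <12-hex-node> <12-hex-p1> 000000000000
#
# and --debug widens the node columns to full 40-hex hashes.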


@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = set(n for n, f in stages)

    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
        if opts[b'optimize']:
            showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
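

# Example (illustrative): a hypothetical `hg debugrevspec -p all 'tip^::'`
# prints the tree after each stage listed in `stages` above (parsed,
# expanded, concatenated, analyzed, optimized) followed by the matching
# revision numbers, while --verify-optimized instead diffs the analyzed
# and optimized evaluations and exits 1 if they disagree.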


@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround for the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    logfh = None

    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    if opts[b'logiofd']:
        # Line buffered because output is line based.
        try:
            logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 1)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 1)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 1)

    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()


@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """

    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    with repo.wlock():
        repo.setparents(node1, node2)
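

# Example (illustrative): a conversion tool could graft a merge by hand with
#
#   hg debugsetparents REV1 REV2
#   hg commit -m 'synthetic merge'
#
# bearing in mind the caveat above that the dirstate is not updated, so
# `hg status` may be wrong until the commit.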


@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        sidedata = list(sidedata.items())
        sidedata.sort()
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b' %s\n' % stringutil.pprint(value))


@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    '''test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    '''
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = hg.parseurl(ui.expandpath(source))
    url = util.url(source)

    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    s = ssl.wrap_socket(
        socket.socket(),
        ssl_version=ssl.PROTOCOL_TLS,
        cert_reqs=ssl.CERT_NONE,
        ca_certs=None,
    )

    try:
        s.connect(addr)
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()


@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    ctx = scmutil.revsingle(repo, rev, None)
    for k, v in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % k)
        ui.writenoi18n(b' source %s\n' % v[0])
        ui.writenoi18n(b' revision %s\n' % v[1])


@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only, unless closest
    successors sets are requested.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors is called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    ctx2str = bytes
    node2str = short
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % ctx2str(ctx))
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        ):
            if succsset:
                ui.write(b'    ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(b' ')
                    ui.write(node2str(node))
            ui.write(b'\n')


@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = scmutil.revrange(repo, opts['rev'])

    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
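

# Example (illustrative): a hypothetical
#
#   hg debugtemplate -r . -D greeting=hello '{greeting} {node|short}\n'
#
# renders the template once per -r revision with the -D pairs available as
# extra keywords; without -r it is rendered a single time as a generic
# template.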


@command(
    b'debuguigetpass',
    [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    r = ui.getpass(prompt)
    ui.writenoi18n(b'response: %s\n' % r)


@command(
    b'debuguiprompt',
    [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    r = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % r)


@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    with repo.wlock(), repo.lock():
        repo.updatecaches(full=True)


@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

3555 such as `--manifest`:
3555 such as `--manifest`:
3556
3556
3557 * `--manifest`: only optimize the manifest
3557 * `--manifest`: only optimize the manifest
3558 * `--no-manifest`: optimize all revlog but the manifest
3558 * `--no-manifest`: optimize all revlog but the manifest
3559 * `--changelog`: optimize the changelog only
3559 * `--changelog`: optimize the changelog only
3560 * `--no-changelog --no-manifest`: optimize filelogs only
3560 * `--no-changelog --no-manifest`: optimize filelogs only
3561 """
3561 """
3562 return upgrade.upgraderepo(
3562 return upgrade.upgraderepo(
3563 ui, repo, run=run, optimize=optimize, backup=backup, **opts
3563 ui, repo, run=run, optimize=optimize, backup=backup, **opts
3564 )
3564 )
3565
3565
3566
3566
3567 @command(
3567 @command(
3568 b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
3568 b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
3569 )
3569 )
3570 def debugwalk(ui, repo, *pats, **opts):
3570 def debugwalk(ui, repo, *pats, **opts):
3571 """show how files match on given patterns"""
3571 """show how files match on given patterns"""
3572 opts = pycompat.byteskwargs(opts)
3572 opts = pycompat.byteskwargs(opts)
3573 m = scmutil.match(repo[None], pats, opts)
3573 m = scmutil.match(repo[None], pats, opts)
3574 if ui.verbose:
3574 if ui.verbose:
3575 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
3575 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
3576 items = list(repo[None].walk(m))
3576 items = list(repo[None].walk(m))
3577 if not items:
3577 if not items:
3578 return
3578 return
3579 f = lambda fn: fn
3579 f = lambda fn: fn
3580 if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
3580 if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
3581 f = lambda fn: util.normpath(fn)
3581 f = lambda fn: util.normpath(fn)
3582 fmt = b'f %%-%ds %%-%ds %%s' % (
3582 fmt = b'f %%-%ds %%-%ds %%s' % (
3583 max([len(abs) for abs in items]),
3583 max([len(abs) for abs in items]),
3584 max([len(repo.pathto(abs)) for abs in items]),
3584 max([len(repo.pathto(abs)) for abs in items]),
3585 )
3585 )
3586 for abs in items:
3586 for abs in items:
3587 line = fmt % (
3587 line = fmt % (
3588 abs,
3588 abs,
3589 f(repo.pathto(abs)),
3589 f(repo.pathto(abs)),
3590 m.exact(abs) and b'exact' or b'',
3590 m.exact(abs) and b'exact' or b'',
3591 )
3591 )
3592 ui.write(b"%s\n" % line.rstrip())
3592 ui.write(b"%s\n" % line.rstrip())


@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
        dnodes = b''
        if entry.get(b'divergentnodes'):
            dnodes = (
                b' '.join(
                    b'%s (%s)' % (ctx.hex(), ctx.phasestr())
                    for ctx in entry[b'divergentnodes']
                )
                + b' '
            )
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )


@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    for opt in cmdutil.remoteopts:
        del opts[opt[1]]
    args = {}
    for k, v in pycompat.iteritems(opts):
        if v:
            args[k] = v
    args = pycompat.strkwargs(args)
    # run twice to check that we don't mess up the stream for the next command
    res1 = repo.debugwireargs(*vals, **args)
    res2 = repo.debugwireargs(*vals, **args)
    ui.write(b"%s\n" % res1)
    if res1 != res2:
        ui.warn(b"%s\n" % res2)


def _parsewirelangblocks(fh):
    activeaction = None
    blocklines = []
    lastindent = 0

    for line in fh:
        line = line.rstrip()
        if not line:
            continue

        if line.startswith(b'#'):
            continue

        if not line.startswith(b' '):
            # New block. Flush previous one.
            if activeaction:
                yield activeaction, blocklines

            activeaction = line
            blocklines = []
            lastindent = 0
            continue

        # Else we start with an indent.

        if not activeaction:
            raise error.Abort(_(b'indented line outside of block'))

        indent = len(line) - len(line.lstrip())

        # If this line is indented more than the last line, concatenate it.
        if indent > lastindent and blocklines:
            blocklines[-1] += line.lstrip()
        else:
            blocklines.append(line)
            lastindent = indent

    # Flush last block.
    if activeaction:
        yield activeaction, blocklines
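

# Example (illustrative): fed the two input lines
#
#   command listkeys
#       namespace namespaces
#
# the parser above yields a single block,
# (b'command listkeys', [b'    namespace namespaces']); a line indented
# deeper than its predecessor is instead concatenated onto the previous
# block line.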
3683
3683
3684
3684
@command(
    b'debugwireproto',
    [
        (b'', b'localssh', False, _(b'start an SSH server for this repo')),
        (b'', b'peer', b'', _(b'construct a specific version of the peer')),
        (
            b'',
            b'noreadstderr',
            False,
            _(b'do not read from stderr of the remote'),
        ),
        (
            b'',
            b'nologhandshake',
            False,
            _(b'do not log I/O related to the peer handshake'),
        ),
    ]
    + cmdutil.remoteopts,
    _(b'[PATH]'),
    optionalrepo=True,
)
def debugwireproto(ui, repo, path=None, **opts):
    """send wire protocol commands to a server

    This command can be used to issue wire protocol commands to remote
    peers and to debug the raw data being exchanged.

    ``--localssh`` will start an SSH server against the current repository
    and connect to that. By default, the connection will perform a handshake
    and establish an appropriate peer instance.

    ``--peer`` can be used to bypass the handshake protocol and construct a
    peer instance using the specified class type. Valid values are ``raw``,
    ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
    raw data payloads and don't support higher-level command actions.

    ``--noreadstderr`` can be used to disable automatic reading from stderr
    of the peer (for SSH connections only). Disabling automatic reading of
    stderr is useful for making output more deterministic.

    Commands are issued via a mini language which is specified via stdin.
    The language consists of individual actions to perform. An action is
    defined by a block. A block is defined as a line with no leading
    space followed by 0 or more lines with leading space. Blocks are
    effectively a high-level command with additional metadata.

    Lines beginning with ``#`` are ignored.

    The following sections denote available actions.

    raw
    ---

    Send raw data to the server.

    The block payload contains the raw data to send as one atomic send
    operation. The data may not actually be delivered in a single system
    call: it depends on the abilities of the transport being used.

    Each line in the block is de-indented and concatenated. Then, that
    value is evaluated as a Python b'' literal. This allows the use of
    backslash escaping, etc.

    raw+
    ----

    Behaves like ``raw`` except that it flushes output afterwards.

    command <X>
    -----------

    Send a request to run a named command, whose name follows the ``command``
    string.

    Arguments to the command are defined as lines in this block. The format of
    each line is ``<key> <value>``. e.g.::

        command listkeys
            namespace bookmarks

    If the value begins with ``eval:``, it will be interpreted as a Python
    literal expression. Otherwise values are interpreted as Python b'' literals.
    This allows sending complex types and encoding special byte sequences via
    backslash escaping.

    The following arguments have special meaning:

    ``PUSHFILE``
        When defined, the *push* mechanism of the peer will be used instead
        of the static request-response mechanism and the content of the
        file specified in the value of this argument will be sent as the
        command payload.

        This can be used to submit a local bundle file to the remote.

    batchbegin
    ----------

    Instruct the peer to begin a batched send.

    All ``command`` blocks are queued for execution until the next
    ``batchsubmit`` block.

    batchsubmit
    -----------

    Submit previously queued ``command`` blocks as a batch request.

    This action MUST be paired with a ``batchbegin`` action.

    httprequest <method> <path>
    ---------------------------

    (HTTP peer only)

    Send an HTTP request to the peer.

    The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.

    Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
    headers to add to the request. e.g. ``Accept: foo``.

    The following arguments are special:

    ``BODYFILE``
        The content of the file defined as the value to this argument will be
        transferred verbatim as the HTTP request body.

    ``frame <type> <flags> <payload>``
        Send a unified protocol frame as part of the request body.

        All frames will be collected and sent as the body to the HTTP
        request.

    close
    -----

    Close the connection to the server.

    flush
    -----

    Flush data written to the server.

    readavailable
    -------------

    Close the write end of the connection and read all available data from
    the server.

    If the connection to the server encompasses multiple pipes, we poll both
    pipes and read available data.

    readline
    --------

    Read a line of output from the server. If there are multiple output
    pipes, reads only the main pipe.

    ereadline
    ---------

    Like ``readline``, but read from the stderr pipe, if available.

    read <X>
    --------

    ``read()`` N bytes from the server's main output pipe.

    eread <X>
    ---------

    ``read()`` N bytes from the server's stderr pipe, if available.

    Specifying Unified Frame-Based Protocol Frames
    ----------------------------------------------

    It is possible to emit *Unified Frame-Based Protocol* frames by using
    special syntax.

    A frame is composed as a type, flags, and payload. These can be parsed
    from a string of the form:

       <request-id> <stream-id> <stream-flags> <type> <flags> <payload>

    ``request-id`` and ``stream-id`` are integers defining the request and
    stream identifiers.

    ``type`` can be an integer value for the frame type or the string name
    of the type. The strings are defined in ``wireprotoframing.py``. e.g.
    ``command-name``.

    ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
    components. Each component (and there can be just one) can be an integer
    or a flag name for stream flags or frame flags, respectively. Values are
    resolved to integers and then bitwise OR'd together.

    ``payload`` represents the raw frame payload. If it begins with
    ``cbor:``, the following string is evaluated as Python code and the
    resulting object is fed into a CBOR encoder. Otherwise it is interpreted
    as a Python byte string literal.
    """
    opts = pycompat.byteskwargs(opts)

    if opts[b'localssh'] and not repo:
        raise error.Abort(_(b'--localssh requires a repository'))

    if opts[b'peer'] and opts[b'peer'] not in (
        b'raw',
        b'http2',
        b'ssh1',
        b'ssh2',
    ):
        raise error.Abort(
            _(b'invalid value for --peer'),
            hint=_(b'valid values are "raw", "http2", "ssh1", and "ssh2"'),
        )

    if path and opts[b'localssh']:
        raise error.Abort(_(b'cannot specify --localssh with an explicit path'))

    if ui.interactive():
        ui.write(_(b'(waiting for commands on stdin)\n'))

    blocks = list(_parsewirelangblocks(ui.fin))

    proc = None
    stdin = None
    stdout = None
    stderr = None
    opener = None

    if opts[b'localssh']:
        # We start the SSH server in its own process so there is process
        # separation. This prevents a whole class of potential bugs around
        # shared state from interfering with server operation.
        args = procutil.hgcmd() + [
            b'-R',
            repo.root,
            b'debugserve',
            b'--sshstdio',
        ]
        proc = subprocess.Popen(
            pycompat.rapply(procutil.tonativestr, args),
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            bufsize=0,
        )

        stdin = proc.stdin
        stdout = proc.stdout
        stderr = proc.stderr

        # We turn the pipes into observers so we can log I/O.
        if ui.verbose or opts[b'peer'] == b'raw':
            stdin = util.makeloggingfileobject(
                ui, proc.stdin, b'i', logdata=True
            )
            stdout = util.makeloggingfileobject(
                ui, proc.stdout, b'o', logdata=True
            )
            stderr = util.makeloggingfileobject(
                ui, proc.stderr, b'e', logdata=True
            )

        # --localssh also implies the peer connection settings.

        url = b'ssh://localserver'
        autoreadstderr = not opts[b'noreadstderr']

        if opts[b'peer'] == b'ssh1':
            ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
            peer = sshpeer.sshv1peer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                None,
                autoreadstderr=autoreadstderr,
            )
        elif opts[b'peer'] == b'ssh2':
            ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
            peer = sshpeer.sshv2peer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                None,
                autoreadstderr=autoreadstderr,
            )
        elif opts[b'peer'] == b'raw':
            ui.write(_(b'using raw connection to peer\n'))
            peer = None
        else:
            ui.write(_(b'creating ssh peer from handshake results\n'))
            peer = sshpeer.makepeer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                autoreadstderr=autoreadstderr,
            )

    elif path:
        # We bypass hg.peer() so we can proxy the sockets.
        # TODO consider not doing this because we skip
        # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
        u = util.url(path)
        if u.scheme != b'http':
            raise error.Abort(_(b'only http:// paths are currently supported'))

        url, authinfo = u.authinfo()
        openerargs = {
            'useragent': b'Mercurial debugwireproto',
        }

        # Turn pipes/sockets into observers so we can log I/O.
        if ui.verbose:
            openerargs.update(
                {
                    'loggingfh': ui,
                    'loggingname': b's',
                    'loggingopts': {'logdata': True, 'logdataapis': False,},
                }
            )

        if ui.debugflag:
            openerargs['loggingopts']['logdataapis'] = True

        # Don't send default headers when in raw mode. This allows us to
        # bypass most of the behavior of our URL handling code so we can
        # have near complete control over what's sent on the wire.
        if opts[b'peer'] == b'raw':
            openerargs['sendaccept'] = False

        opener = urlmod.opener(ui, authinfo, **openerargs)

        if opts[b'peer'] == b'http2':
            ui.write(_(b'creating http peer for wire protocol version 2\n'))
            # We go through makepeer() because we need an API descriptor for
            # the peer instance to be useful.
            with ui.configoverride(
                {(b'experimental', b'httppeer.advertise-v2'): True}
            ):
                if opts[b'nologhandshake']:
                    ui.pushbuffer()

                peer = httppeer.makepeer(ui, path, opener=opener)

                if opts[b'nologhandshake']:
                    ui.popbuffer()

            if not isinstance(peer, httppeer.httpv2peer):
                raise error.Abort(
                    _(
                        b'could not instantiate HTTP peer for '
                        b'wire protocol version 2'
                    ),
                    hint=_(
                        b'the server may not have the feature '
                        b'enabled or is not allowing this '
                        b'client version'
                    ),
                )

        elif opts[b'peer'] == b'raw':
            ui.write(_(b'using raw connection to peer\n'))
            peer = None
        elif opts[b'peer']:
            raise error.Abort(
                _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
            )
        else:
            peer = httppeer.makepeer(ui, path, opener=opener)

        # We /could/ populate stdin/stdout with sock.makefile()...
    else:
        raise error.Abort(_(b'unsupported connection configuration'))

    batchedcommands = None

    # Now perform actions based on the parsed wire language instructions.
    for action, lines in blocks:
        if action in (b'raw', b'raw+'):
            if not stdin:
                raise error.Abort(_(b'cannot call raw/raw+ on this peer'))

            # Concatenate the data together.
            data = b''.join(l.lstrip() for l in lines)
            data = stringutil.unescapestr(data)
            stdin.write(data)

            if action == b'raw+':
                stdin.flush()
        elif action == b'flush':
            if not stdin:
                raise error.Abort(_(b'cannot call flush on this peer'))
            stdin.flush()
        elif action.startswith(b'command'):
            if not peer:
                raise error.Abort(
                    _(
                        b'cannot send commands unless peer instance '
                        b'is available'
                    )
                )

            command = action.split(b' ', 1)[1]

            args = {}
            for line in lines:
                # We need to allow empty values.
                fields = line.lstrip().split(b' ', 1)
                if len(fields) == 1:
                    key = fields[0]
                    value = b''
                else:
                    key, value = fields

                if value.startswith(b'eval:'):
                    value = stringutil.evalpythonliteral(value[5:])
                else:
                    value = stringutil.unescapestr(value)

                args[key] = value

            if batchedcommands is not None:
                batchedcommands.append((command, args))
                continue

            ui.status(_(b'sending %s command\n') % command)

            if b'PUSHFILE' in args:
                with open(args[b'PUSHFILE'], 'rb') as fh:
                    del args[b'PUSHFILE']
                    res, output = peer._callpush(
                        command, fh, **pycompat.strkwargs(args)
                    )
                    ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
                    ui.status(
                        _(b'remote output: %s\n') % stringutil.escapestr(output)
                    )
            else:
                with peer.commandexecutor() as e:
                    res = e.callcommand(command, args).result()

                if isinstance(res, wireprotov2peer.commandresponse):
                    val = res.objects()
                    ui.status(
                        _(b'response: %s\n')
                        % stringutil.pprint(val, bprefix=True, indent=2)
                    )
                else:
                    ui.status(
                        _(b'response: %s\n')
                        % stringutil.pprint(res, bprefix=True, indent=2)
                    )

        elif action == b'batchbegin':
            if batchedcommands is not None:
                raise error.Abort(_(b'nested batchbegin not allowed'))

            batchedcommands = []
        elif action == b'batchsubmit':
            # There is a batching API we could go through. But it would be
            # difficult to normalize requests into function calls. It is easier
            # to bypass this layer and normalize to commands + args.
            ui.status(
                _(b'sending batch with %d sub-commands\n')
                % len(batchedcommands)
            )
            assert peer is not None
            for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
                ui.status(
                    _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
                )

            batchedcommands = None

        elif action.startswith(b'httprequest '):
            if not opener:
                raise error.Abort(
                    _(b'cannot use httprequest without an HTTP peer')
                )

            request = action.split(b' ', 2)
            if len(request) != 3:
                raise error.Abort(
                    _(
                        b'invalid httprequest: expected format is '
                        b'"httprequest <method> <path>"'
                    )
                )

            method, httppath = request[1:]
            headers = {}
            body = None
            frames = []
            for line in lines:
                line = line.lstrip()
                m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
                if m:
                    # Headers need to use native strings.
                    key = pycompat.strurl(m.group(1))
                    value = pycompat.strurl(m.group(2))
                    headers[key] = value
                    continue

                if line.startswith(b'BODYFILE '):
                    with open(line.split(b' ', 1)[1], 'rb') as fh:
                        body = fh.read()
                elif line.startswith(b'frame '):
                    frame = wireprotoframing.makeframefromhumanstring(
                        line[len(b'frame ') :]
                    )

                    frames.append(frame)
                else:
                    raise error.Abort(
                        _(b'unknown argument to httprequest: %s') % line
                    )

            url = path + httppath

            if frames:
                body = b''.join(bytes(f) for f in frames)

            req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)

            # urllib.Request insists on using has_data() as a proxy for
            # determining the request method. Override that to use our
            # explicitly requested method.
            req.get_method = lambda: pycompat.sysstr(method)

            try:
                res = opener.open(req)
                body = res.read()
            except util.urlerr.urlerror as e:
                # read() method must be called, but only exists in Python 2
                getattr(e, 'read', lambda: None)()
                continue

            ct = res.headers.get('Content-Type')
            if ct == 'application/mercurial-cbor':
                ui.write(
                    _(b'cbor> %s\n')
                    % stringutil.pprint(
                        cborutil.decodeall(body), bprefix=True, indent=2
                    )
                )

        elif action == b'close':
            assert peer is not None
            peer.close()
        elif action == b'readavailable':
            if not stdout or not stderr:
                raise error.Abort(
                    _(b'readavailable not available on this peer')
                )

            stdin.close()
            stdout.read()
            stderr.read()

        elif action == b'readline':
            if not stdout:
                raise error.Abort(_(b'readline not available on this peer'))
            stdout.readline()
        elif action == b'ereadline':
            if not stderr:
                raise error.Abort(_(b'ereadline not available on this peer'))
            stderr.readline()
        elif action.startswith(b'read '):
            count = int(action.split(b' ', 1)[1])
            if not stdout:
                raise error.Abort(_(b'read not available on this peer'))
            stdout.read(count)
        elif action.startswith(b'eread '):
            count = int(action.split(b' ', 1)[1])
            if not stderr:
                raise error.Abort(_(b'eread not available on this peer'))
            stderr.read(count)
        else:
            raise error.Abort(_(b'unknown action: %s') % action)

    if batchedcommands is not None:
        raise error.Abort(_(b'unclosed "batchbegin" request'))

    if peer:
        peer.close()

    if proc:
        proc.kill()

@@ -1,637 +1,638 b''
# fileset.py - file set queries for mercurial
#
# Copyright 2010 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import errno
import re

from .i18n import _
from .pycompat import getattr
from . import (
    error,
    filesetlang,
    match as matchmod,
    merge,
    pycompat,
    registrar,
    scmutil,
    util,
)
from .utils import stringutil

# common weight constants
_WEIGHT_CHECK_FILENAME = filesetlang.WEIGHT_CHECK_FILENAME
_WEIGHT_READ_CONTENTS = filesetlang.WEIGHT_READ_CONTENTS
_WEIGHT_STATUS = filesetlang.WEIGHT_STATUS
_WEIGHT_STATUS_THOROUGH = filesetlang.WEIGHT_STATUS_THOROUGH

# helpers for processing parsed tree
getsymbol = filesetlang.getsymbol
getstring = filesetlang.getstring
_getkindpat = filesetlang.getkindpat
getpattern = filesetlang.getpattern
getargs = filesetlang.getargs


def getmatch(mctx, x):
    if not x:
        raise error.ParseError(_(b"missing argument"))
    return methods[x[0]](mctx, *x[1:])


def getmatchwithstatus(mctx, x, hint):
    keys = set(getstring(hint, b'status hint must be a string').split())
    return getmatch(mctx.withstatus(keys), x)


def stringmatch(mctx, x):
    return mctx.matcher([x])


def kindpatmatch(mctx, x, y):
    return stringmatch(
        mctx,
        _getkindpat(
            x, y, matchmod.allpatternkinds, _(b"pattern must be a string")
        ),
    )


def patternsmatch(mctx, *xs):
    allkinds = matchmod.allpatternkinds
    patterns = [
        getpattern(x, allkinds, _(b"pattern must be a string")) for x in xs
    ]
    return mctx.matcher(patterns)


def andmatch(mctx, x, y):
    xm = getmatch(mctx, x)
    ym = getmatch(mctx.narrowed(xm), y)
    return matchmod.intersectmatchers(xm, ym)


def ormatch(mctx, *xs):
    ms = [getmatch(mctx, x) for x in xs]
    return matchmod.unionmatcher(ms)


def notmatch(mctx, x):
    m = getmatch(mctx, x)
    return mctx.predicate(lambda f: not m(f), predrepr=(b'<not %r>', m))


def minusmatch(mctx, x, y):
    xm = getmatch(mctx, x)
    ym = getmatch(mctx.narrowed(xm), y)
    return matchmod.differencematcher(xm, ym)
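

# Illustrative note (an assumption based on the operator table in
# filesetlang, which is not shown here): these combinators back the fileset
# operators, e.g. 'x and y' maps to andmatch() and 'x - y' to minusmatch(),
# with the right-hand matcher evaluated against a context narrowed by the
# left-hand one.

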
def listmatch(mctx, *xs):
    raise error.ParseError(
        _(b"can't use a list in this context"),
        hint=_(b'see \'hg help "filesets.x or y"\''),
    )


def func(mctx, a, b):
    funcname = getsymbol(a)
    if funcname in symbols:
        return symbols[funcname](mctx, b)

    keep = lambda fn: getattr(fn, '__doc__', None) is not None

    syms = [s for (s, fn) in symbols.items() if keep(fn)]
    raise error.UnknownIdentifier(funcname, syms)


# symbols are callable like:
#  fun(mctx, x)
# with:
#  mctx - current matchctx instance
#  x - argument in tree form
symbols = filesetlang.symbols

predicate = registrar.filesetpredicate(symbols)


@predicate(b'modified()', callstatus=True, weight=_WEIGHT_STATUS)
def modified(mctx, x):
    """File that is modified according to :hg:`status`.
    """
    # i18n: "modified" is a keyword
    getargs(x, 0, 0, _(b"modified takes no arguments"))
    s = set(mctx.status().modified)
    return mctx.predicate(s.__contains__, predrepr=b'modified')


@predicate(b'added()', callstatus=True, weight=_WEIGHT_STATUS)
def added(mctx, x):
    """File that is added according to :hg:`status`.
    """
    # i18n: "added" is a keyword
    getargs(x, 0, 0, _(b"added takes no arguments"))
    s = set(mctx.status().added)
    return mctx.predicate(s.__contains__, predrepr=b'added')


@predicate(b'removed()', callstatus=True, weight=_WEIGHT_STATUS)
def removed(mctx, x):
    """File that is removed according to :hg:`status`.
    """
    # i18n: "removed" is a keyword
    getargs(x, 0, 0, _(b"removed takes no arguments"))
    s = set(mctx.status().removed)
    return mctx.predicate(s.__contains__, predrepr=b'removed')


@predicate(b'deleted()', callstatus=True, weight=_WEIGHT_STATUS)
def deleted(mctx, x):
    """Alias for ``missing()``.
    """
    # i18n: "deleted" is a keyword
    getargs(x, 0, 0, _(b"deleted takes no arguments"))
    s = set(mctx.status().deleted)
    return mctx.predicate(s.__contains__, predrepr=b'deleted')


@predicate(b'missing()', callstatus=True, weight=_WEIGHT_STATUS)
def missing(mctx, x):
    """File that is missing according to :hg:`status`.
    """
    # i18n: "missing" is a keyword
    getargs(x, 0, 0, _(b"missing takes no arguments"))
    s = set(mctx.status().deleted)
    return mctx.predicate(s.__contains__, predrepr=b'deleted')


@predicate(b'unknown()', callstatus=True, weight=_WEIGHT_STATUS_THOROUGH)
def unknown(mctx, x):
    """File that is unknown according to :hg:`status`."""
    # i18n: "unknown" is a keyword
    getargs(x, 0, 0, _(b"unknown takes no arguments"))
    s = set(mctx.status().unknown)
    return mctx.predicate(s.__contains__, predrepr=b'unknown')


@predicate(b'ignored()', callstatus=True, weight=_WEIGHT_STATUS_THOROUGH)
def ignored(mctx, x):
    """File that is ignored according to :hg:`status`."""
    # i18n: "ignored" is a keyword
    getargs(x, 0, 0, _(b"ignored takes no arguments"))
    s = set(mctx.status().ignored)
    return mctx.predicate(s.__contains__, predrepr=b'ignored')


@predicate(b'clean()', callstatus=True, weight=_WEIGHT_STATUS)
def clean(mctx, x):
    """File that is clean according to :hg:`status`.
    """
    # i18n: "clean" is a keyword
    getargs(x, 0, 0, _(b"clean takes no arguments"))
    s = set(mctx.status().clean)
    return mctx.predicate(s.__contains__, predrepr=b'clean')


@predicate(b'tracked()')
def tracked(mctx, x):
    """File that is under Mercurial control."""
    # i18n: "tracked" is a keyword
    getargs(x, 0, 0, _(b"tracked takes no arguments"))
    return mctx.predicate(mctx.ctx.__contains__, predrepr=b'tracked')


@predicate(b'binary()', weight=_WEIGHT_READ_CONTENTS)
def binary(mctx, x):
    """File that appears to be binary (contains NUL bytes).
    """
    # i18n: "binary" is a keyword
    getargs(x, 0, 0, _(b"binary takes no arguments"))
    return mctx.fpredicate(
        lambda fctx: fctx.isbinary(), predrepr=b'binary', cache=True
    )


@predicate(b'exec()')
def exec_(mctx, x):
    """File that is marked as executable.
    """
    # i18n: "exec" is a keyword
    getargs(x, 0, 0, _(b"exec takes no arguments"))
    ctx = mctx.ctx
    return mctx.predicate(lambda f: ctx.flags(f) == b'x', predrepr=b'exec')


@predicate(b'symlink()')
def symlink(mctx, x):
    """File that is marked as a symlink.
    """
    # i18n: "symlink" is a keyword
    getargs(x, 0, 0, _(b"symlink takes no arguments"))
    ctx = mctx.ctx
    return mctx.predicate(lambda f: ctx.flags(f) == b'l', predrepr=b'symlink')


@predicate(b'resolved()', weight=_WEIGHT_STATUS)
def resolved(mctx, x):
    """File that is marked resolved according to :hg:`resolve -l`.
    """
    # i18n: "resolved" is a keyword
    getargs(x, 0, 0, _(b"resolved takes no arguments"))
    if mctx.ctx.rev() is not None:
        return mctx.never()
    ms = merge.mergestate.read(mctx.ctx.repo())
    return mctx.predicate(
        lambda f: f in ms and ms[f] == b'r', predrepr=b'resolved'
    )


@predicate(b'unresolved()', weight=_WEIGHT_STATUS)
def unresolved(mctx, x):
    """File that is marked unresolved according to :hg:`resolve -l`.
    """
    # i18n: "unresolved" is a keyword
    getargs(x, 0, 0, _(b"unresolved takes no arguments"))
    if mctx.ctx.rev() is not None:
        return mctx.never()
    ms = merge.mergestate.read(mctx.ctx.repo())
    return mctx.predicate(
        lambda f: f in ms and ms[f] == b'u', predrepr=b'unresolved'
    )


@predicate(b'hgignore()', weight=_WEIGHT_STATUS)
def hgignore(mctx, x):
    """File that matches the active .hgignore pattern.
    """
    # i18n: "hgignore" is a keyword
    getargs(x, 0, 0, _(b"hgignore takes no arguments"))
    return mctx.ctx.repo().dirstate._ignore


@predicate(b'portable()', weight=_WEIGHT_CHECK_FILENAME)
def portable(mctx, x):
    """File that has a portable name. (This doesn't include filenames with case
    collisions.)
    """
    # i18n: "portable" is a keyword
    getargs(x, 0, 0, _(b"portable takes no arguments"))
    return mctx.predicate(
        lambda f: util.checkwinfilename(f) is None, predrepr=b'portable'
    )


@predicate(b'grep(regex)', weight=_WEIGHT_READ_CONTENTS)
def grep(mctx, x):
    """File contains the given regular expression.
    """
    try:
        # i18n: "grep" is a keyword
        r = re.compile(getstring(x, _(b"grep requires a pattern")))
    except re.error as e:
        raise error.ParseError(
            _(b'invalid match pattern: %s') % stringutil.forcebytestr(e)
        )
    return mctx.fpredicate(
        lambda fctx: r.search(fctx.data()),
        predrepr=(b'grep(%r)', r.pattern),
        cache=True,
    )
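

# Hedged usage note (not part of the original source): predicates such as
# grep() are typically reached via a 'set:' expression on the command line,
# e.g.:
#
#   hg files 'set:grep("TODO") and not binary()'

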
def _sizetomax(s):
    try:
        s = s.strip().lower()
        for k, v in util._sizeunits:
            if s.endswith(k):
                # max(4k) = 5k - 1, max(4.5k) = 4.6k - 1
                n = s[: -len(k)]
                inc = 1.0
                if b"." in n:
                    inc /= 10 ** len(n.split(b".")[1])
                return int((float(n) + inc) * v) - 1
        # no extension, this is a precise value
        return int(s)
    except ValueError:
        raise error.ParseError(_(b"couldn't parse size: %s") % s)


def sizematcher(expr):
    """Return a function(size) -> bool from the ``size()`` expression"""
    expr = expr.strip()
    if b'-' in expr:  # do we have a range?
        a, b = expr.split(b'-', 1)
        a = util.sizetoint(a)
        b = util.sizetoint(b)
        return lambda x: x >= a and x <= b
    elif expr.startswith(b"<="):
        a = util.sizetoint(expr[2:])
        return lambda x: x <= a
    elif expr.startswith(b"<"):
        a = util.sizetoint(expr[1:])
        return lambda x: x < a
    elif expr.startswith(b">="):
        a = util.sizetoint(expr[2:])
        return lambda x: x >= a
    elif expr.startswith(b">"):
        a = util.sizetoint(expr[1:])
        return lambda x: x > a
    else:
        a = util.sizetoint(expr)
        b = _sizetomax(expr)
        return lambda x: x >= a and x <= b
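

# Hedged usage sketch (not part of the original source): sizematcher()
# turns a size expression into a predicate over byte counts:
#
#   m = sizematcher(b'1k')        # bare value: 1024 <= x <= 2047
#   m = sizematcher(b'>= .5MB')   # comparison: x >= 524288
#   m = sizematcher(b'4k - 1MB')  # inclusive range: 4096 <= x <= 1048576

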
350 @predicate(b'size(expression)', weight=_WEIGHT_STATUS)
350 @predicate(b'size(expression)', weight=_WEIGHT_STATUS)
351 def size(mctx, x):
351 def size(mctx, x):
352 """File size matches the given expression. Examples:
352 """File size matches the given expression. Examples:
353
353
354 - size('1k') - files from 1024 to 2047 bytes
354 - size('1k') - files from 1024 to 2047 bytes
355 - size('< 20k') - files less than 20480 bytes
355 - size('< 20k') - files less than 20480 bytes
356 - size('>= .5MB') - files at least 524288 bytes
356 - size('>= .5MB') - files at least 524288 bytes
357 - size('4k - 1MB') - files from 4096 bytes to 1048576 bytes
357 - size('4k - 1MB') - files from 4096 bytes to 1048576 bytes
358 """
358 """
359 # i18n: "size" is a keyword
359 # i18n: "size" is a keyword
360 expr = getstring(x, _(b"size requires an expression"))
360 expr = getstring(x, _(b"size requires an expression"))
361 m = sizematcher(expr)
361 m = sizematcher(expr)
362 return mctx.fpredicate(
362 return mctx.fpredicate(
363 lambda fctx: m(fctx.size()), predrepr=(b'size(%r)', expr), cache=True
363 lambda fctx: m(fctx.size()), predrepr=(b'size(%r)', expr), cache=True
364 )
364 )
365
365
366
366
367 @predicate(b'encoding(name)', weight=_WEIGHT_READ_CONTENTS)
367 @predicate(b'encoding(name)', weight=_WEIGHT_READ_CONTENTS)
368 def encoding(mctx, x):
368 def encoding(mctx, x):
369 """File can be successfully decoded with the given character
369 """File can be successfully decoded with the given character
370 encoding. May not be useful for encodings other than ASCII and
370 encoding. May not be useful for encodings other than ASCII and
371 UTF-8.
371 UTF-8.
372 """
372 """
373
373
374 # i18n: "encoding" is a keyword
374 # i18n: "encoding" is a keyword
375 enc = getstring(x, _(b"encoding requires an encoding name"))
375 enc = getstring(x, _(b"encoding requires an encoding name"))
376
376
377 def encp(fctx):
377 def encp(fctx):
378 d = fctx.data()
378 d = fctx.data()
379 try:
379 try:
380 d.decode(pycompat.sysstr(enc))
380 d.decode(pycompat.sysstr(enc))
381 return True
381 return True
382 except LookupError:
382 except LookupError:
383 raise error.Abort(_(b"unknown encoding '%s'") % enc)
383 raise error.Abort(_(b"unknown encoding '%s'") % enc)
384 except UnicodeDecodeError:
384 except UnicodeDecodeError:
385 return False
385 return False
386
386
387 return mctx.fpredicate(encp, predrepr=(b'encoding(%r)', enc), cache=True)
387 return mctx.fpredicate(encp, predrepr=(b'encoding(%r)', enc), cache=True)
388
388
389
389
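
A standalone sketch of the decode test in encp() above: decoding succeeds
for bytes valid in the given encoding and raises UnicodeDecodeError
otherwise (the LookupError branch handles unknown encoding names):

def toy_encp(data, enc):
    try:
        data.decode(enc)
        return True
    except UnicodeDecodeError:
        return False

assert toy_encp(b'caf\xc3\xa9', 'utf-8')      # valid UTF-8
assert not toy_encp(b'caf\xc3\xa9', 'ascii')  # bytes >= 0x80 are not ASCII
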
390 @predicate(b'eol(style)', weight=_WEIGHT_READ_CONTENTS)
390 @predicate(b'eol(style)', weight=_WEIGHT_READ_CONTENTS)
391 def eol(mctx, x):
391 def eol(mctx, x):
392 """File contains newlines of the given style (dos, unix, mac). Binary
392 """File contains newlines of the given style (dos, unix, mac). Binary
393 files are excluded; files with mixed line endings match multiple
393 files are excluded; files with mixed line endings match multiple
394 styles.
394 styles.
395 """
395 """
396
396
397 # i18n: "eol" is a keyword
397 # i18n: "eol" is a keyword
398 enc = getstring(x, _(b"eol requires a style name"))
398 enc = getstring(x, _(b"eol requires a style name"))
399
399
400 def eolp(fctx):
400 def eolp(fctx):
401 if fctx.isbinary():
401 if fctx.isbinary():
402 return False
402 return False
403 d = fctx.data()
403 d = fctx.data()
404 if (enc == b'dos' or enc == b'win') and b'\r\n' in d:
404 if (enc == b'dos' or enc == b'win') and b'\r\n' in d:
405 return True
405 return True
406 elif enc == b'unix' and re.search(b'(?<!\r)\n', d):
406 elif enc == b'unix' and re.search(b'(?<!\r)\n', d):
407 return True
407 return True
408 elif enc == b'mac' and re.search(b'\r(?!\n)', d):
408 elif enc == b'mac' and re.search(b'\r(?!\n)', d):
409 return True
409 return True
410 return False
410 return False
411
411
412 return mctx.fpredicate(eolp, predrepr=(b'eol(%r)', enc), cache=True)
412 return mctx.fpredicate(eolp, predrepr=(b'eol(%r)', enc), cache=True)
413
413
414
414
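
The lookaround regexes in eolp() are the subtle part: a unix newline is a
'\n' not preceded by '\r', and a mac newline is a '\r' not followed by '\n'.
A quick standalone check:

import re

assert re.search(b'(?<!\r)\n', b'a\nb')        # unix newline present
assert not re.search(b'(?<!\r)\n', b'a\r\nb')  # pure dos: no bare '\n'
assert re.search(b'\r(?!\n)', b'a\rb')         # mac newline present
assert re.search(b'(?<!\r)\n', b'a\r\nb\n')    # mixed file matches unix too
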
415 @predicate(b'copied()')
415 @predicate(b'copied()')
416 def copied(mctx, x):
416 def copied(mctx, x):
417 """File that is recorded as being copied.
417 """File that is recorded as being copied.
418 """
418 """
419 # i18n: "copied" is a keyword
419 # i18n: "copied" is a keyword
420 getargs(x, 0, 0, _(b"copied takes no arguments"))
420 getargs(x, 0, 0, _(b"copied takes no arguments"))
421
421
422 def copiedp(fctx):
422 def copiedp(fctx):
423 p = fctx.parents()
423 p = fctx.parents()
424 return p and p[0].path() != fctx.path()
424 return p and p[0].path() != fctx.path()
425
425
426 return mctx.fpredicate(copiedp, predrepr=b'copied', cache=True)
426 return mctx.fpredicate(copiedp, predrepr=b'copied', cache=True)
427
427
428
428
429 @predicate(b'revs(revs, pattern)', weight=_WEIGHT_STATUS)
429 @predicate(b'revs(revs, pattern)', weight=_WEIGHT_STATUS)
430 def revs(mctx, x):
430 def revs(mctx, x):
431 """Evaluate set in the specified revisions. If the revset matches
431 """Evaluate set in the specified revisions. If the revset matches
432 multiple revs, this will return files matching the pattern in any of them.
432 multiple revs, this will return files matching the pattern in any of them.
433 """
433 """
434 # i18n: "revs" is a keyword
434 # i18n: "revs" is a keyword
435 r, x = getargs(x, 2, 2, _(b"revs takes two arguments"))
435 r, x = getargs(x, 2, 2, _(b"revs takes two arguments"))
436 # i18n: "revs" is a keyword
436 # i18n: "revs" is a keyword
437 revspec = getstring(r, _(b"first argument to revs must be a revision"))
437 revspec = getstring(r, _(b"first argument to revs must be a revision"))
438 repo = mctx.ctx.repo()
438 repo = mctx.ctx.repo()
439 revs = scmutil.revrange(repo, [revspec])
439 revs = scmutil.revrange(repo, [revspec])
440
440
441 matchers = []
441 matchers = []
442 for r in revs:
442 for r in revs:
443 ctx = repo[r]
443 ctx = repo[r]
444 mc = mctx.switch(ctx.p1(), ctx)
444 mc = mctx.switch(ctx.p1(), ctx)
445 matchers.append(getmatch(mc, x))
445 matchers.append(getmatch(mc, x))
446 if not matchers:
446 if not matchers:
447 return mctx.never()
447 return mctx.never()
448 if len(matchers) == 1:
448 if len(matchers) == 1:
449 return matchers[0]
449 return matchers[0]
450 return matchmod.unionmatcher(matchers)
450 return matchmod.unionmatcher(matchers)
451
451
452
452
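
The last four lines above are a small fold idiom that recurs in this patch
(compare _buildkindpatsmatcher in match.py below): no matchers yields never,
one is returned as-is, several become a union. A generic sketch:

def toy_fold(matchers, makenever, makeunion):
    if not matchers:
        return makenever()
    if len(matchers) == 1:
        return matchers[0]
    return makeunion(matchers)

assert toy_fold([], lambda: 'never', list) == 'never'
assert toy_fold(['m'], lambda: 'never', list) == 'm'
assert toy_fold(['m1', 'm2'], lambda: 'never', list) == ['m1', 'm2']
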
453 @predicate(b'status(base, rev, pattern)', weight=_WEIGHT_STATUS)
453 @predicate(b'status(base, rev, pattern)', weight=_WEIGHT_STATUS)
454 def status(mctx, x):
454 def status(mctx, x):
455 """Evaluate predicate using status change between ``base`` and
455 """Evaluate predicate using status change between ``base`` and
456 ``rev``. Examples:
456 ``rev``. Examples:
457
457
458 - ``status(3, 7, added())`` - matches files added from "3" to "7"
458 - ``status(3, 7, added())`` - matches files added from "3" to "7"
459 """
459 """
460 repo = mctx.ctx.repo()
460 repo = mctx.ctx.repo()
461 # i18n: "status" is a keyword
461 # i18n: "status" is a keyword
462 b, r, x = getargs(x, 3, 3, _(b"status takes three arguments"))
462 b, r, x = getargs(x, 3, 3, _(b"status takes three arguments"))
463 # i18n: "status" is a keyword
463 # i18n: "status" is a keyword
464 baseerr = _(b"first argument to status must be a revision")
464 baseerr = _(b"first argument to status must be a revision")
465 baserevspec = getstring(b, baseerr)
465 baserevspec = getstring(b, baseerr)
466 if not baserevspec:
466 if not baserevspec:
467 raise error.ParseError(baseerr)
467 raise error.ParseError(baseerr)
468 reverr = _(b"second argument to status must be a revision")
468 reverr = _(b"second argument to status must be a revision")
469 revspec = getstring(r, reverr)
469 revspec = getstring(r, reverr)
470 if not revspec:
470 if not revspec:
471 raise error.ParseError(reverr)
471 raise error.ParseError(reverr)
472 basectx, ctx = scmutil.revpair(repo, [baserevspec, revspec])
472 basectx, ctx = scmutil.revpair(repo, [baserevspec, revspec])
473 mc = mctx.switch(basectx, ctx)
473 mc = mctx.switch(basectx, ctx)
474 return getmatch(mc, x)
474 return getmatch(mc, x)
475
475
476
476
477 @predicate(b'subrepo([pattern])')
477 @predicate(b'subrepo([pattern])')
478 def subrepo(mctx, x):
478 def subrepo(mctx, x):
479 """Subrepositories whose paths match the given pattern.
479 """Subrepositories whose paths match the given pattern.
480 """
480 """
481 # i18n: "subrepo" is a keyword
481 # i18n: "subrepo" is a keyword
482 getargs(x, 0, 1, _(b"subrepo takes at most one argument"))
482 getargs(x, 0, 1, _(b"subrepo takes at most one argument"))
483 ctx = mctx.ctx
483 ctx = mctx.ctx
484 sstate = ctx.substate
484 sstate = ctx.substate
485 if x:
485 if x:
486 pat = getpattern(
486 pat = getpattern(
487 x,
487 x,
488 matchmod.allpatternkinds,
488 matchmod.allpatternkinds,
489 # i18n: "subrepo" is a keyword
489 # i18n: "subrepo" is a keyword
490 _(b"subrepo requires a pattern or no arguments"),
490 _(b"subrepo requires a pattern or no arguments"),
491 )
491 )
492 fast = not matchmod.patkind(pat)
492 fast = not matchmod.patkind(pat)
493 if fast:
493 if fast:
494
494
495 def m(s):
495 def m(s):
496 return s == pat
496 return s == pat
497
497
498 else:
498 else:
499 m = matchmod.match(ctx.repo().root, b'', [pat], ctx=ctx)
499 m = matchmod.match(ctx.repo().root, b'', [pat], ctx=ctx)
500 return mctx.predicate(
500 return mctx.predicate(
501 lambda f: f in sstate and m(f), predrepr=(b'subrepo(%r)', pat)
501 lambda f: f in sstate and m(f), predrepr=(b'subrepo(%r)', pat)
502 )
502 )
503 else:
503 else:
504 return mctx.predicate(sstate.__contains__, predrepr=b'subrepo')
504 return mctx.predicate(sstate.__contains__, predrepr=b'subrepo')
505
505
506
506
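
The fast path above relies on matchmod.patkind returning None for a bare
literal, so a plain subrepo path can be compared as a string instead of
building a matcher. A hedged stand-in for patkind (the real one also
validates against the known kind names):

def toy_patkind(pat):
    kind, sep, _rest = pat.partition(b':')
    return kind if sep else None

assert toy_patkind(b'lib/vendored') is None   # literal: fast string compare
assert toy_patkind(b'glob:lib/*') == b'glob'  # pattern: build a matcher
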
507 methods = {
507 methods = {
508 b'withstatus': getmatchwithstatus,
508 b'withstatus': getmatchwithstatus,
509 b'string': stringmatch,
509 b'string': stringmatch,
510 b'symbol': stringmatch,
510 b'symbol': stringmatch,
511 b'kindpat': kindpatmatch,
511 b'kindpat': kindpatmatch,
512 b'patterns': patternsmatch,
512 b'patterns': patternsmatch,
513 b'and': andmatch,
513 b'and': andmatch,
514 b'or': ormatch,
514 b'or': ormatch,
515 b'minus': minusmatch,
515 b'minus': minusmatch,
516 b'list': listmatch,
516 b'list': listmatch,
517 b'not': notmatch,
517 b'not': notmatch,
518 b'func': func,
518 b'func': func,
519 }
519 }
520
520
521
521
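
This table drives the dispatch over parsed fileset trees: each node is a
tuple whose head selects a handler. getmatch, defined earlier in fileset.py
and outside this hunk, is essentially the one-liner sketched below, and the
handlers recurse through it for their sub-nodes:

def toy_getmatch(mctx, tree):
    # hedged sketch of the dispatch idiom, not the real getmatch
    return methods[tree[0]](mctx, *tree[1:])
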
522 class matchctx(object):
522 class matchctx(object):
523 def __init__(self, basectx, ctx, badfn=None):
523 def __init__(self, basectx, ctx, cwd, badfn=None):
524 self._basectx = basectx
524 self._basectx = basectx
525 self.ctx = ctx
525 self.ctx = ctx
526 self._badfn = badfn
526 self._badfn = badfn
527 self._match = None
527 self._match = None
528 self._status = None
528 self._status = None
529 self.cwd = cwd
529
530
530 def narrowed(self, match):
531 def narrowed(self, match):
531 """Create matchctx for a sub-tree narrowed by the given matcher"""
532 """Create matchctx for a sub-tree narrowed by the given matcher"""
532 mctx = matchctx(self._basectx, self.ctx, self._badfn)
533 mctx = matchctx(self._basectx, self.ctx, self.cwd, self._badfn)
533 mctx._match = match
534 mctx._match = match
534 # leave the wider status, which we don't have to care about
535 # leave the wider status, which we don't have to care about
535 mctx._status = self._status
536 mctx._status = self._status
536 return mctx
537 return mctx
537
538
538 def switch(self, basectx, ctx):
539 def switch(self, basectx, ctx):
539 mctx = matchctx(basectx, ctx, self._badfn)
540 mctx = matchctx(basectx, ctx, self.cwd, self._badfn)
540 mctx._match = self._match
541 mctx._match = self._match
541 return mctx
542 return mctx
542
543
543 def withstatus(self, keys):
544 def withstatus(self, keys):
544 """Create matchctx which has precomputed status specified by the keys"""
545 """Create matchctx which has precomputed status specified by the keys"""
545 mctx = matchctx(self._basectx, self.ctx, self._badfn)
546 mctx = matchctx(self._basectx, self.ctx, self.cwd, self._badfn)
546 mctx._match = self._match
547 mctx._match = self._match
547 mctx._buildstatus(keys)
548 mctx._buildstatus(keys)
548 return mctx
549 return mctx
549
550
550 def _buildstatus(self, keys):
551 def _buildstatus(self, keys):
551 self._status = self._basectx.status(
552 self._status = self._basectx.status(
552 self.ctx,
553 self.ctx,
553 self._match,
554 self._match,
554 listignored=b'ignored' in keys,
555 listignored=b'ignored' in keys,
555 listclean=b'clean' in keys,
556 listclean=b'clean' in keys,
556 listunknown=b'unknown' in keys,
557 listunknown=b'unknown' in keys,
557 )
558 )
558
559
559 def status(self):
560 def status(self):
560 return self._status
561 return self._status
561
562
562 def matcher(self, patterns):
563 def matcher(self, patterns):
563 return self.ctx.match(patterns, badfn=self._badfn)
564 return self.ctx.match(patterns, badfn=self._badfn, cwd=self.cwd)
564
565
565 def predicate(self, predfn, predrepr=None, cache=False):
566 def predicate(self, predfn, predrepr=None, cache=False):
566 """Create a matcher to select files by predfn(filename)"""
567 """Create a matcher to select files by predfn(filename)"""
567 if cache:
568 if cache:
568 predfn = util.cachefunc(predfn)
569 predfn = util.cachefunc(predfn)
569 return matchmod.predicatematcher(
570 return matchmod.predicatematcher(
570 predfn, predrepr=predrepr, badfn=self._badfn
571 predfn, predrepr=predrepr, badfn=self._badfn
571 )
572 )
572
573
573 def fpredicate(self, predfn, predrepr=None, cache=False):
574 def fpredicate(self, predfn, predrepr=None, cache=False):
574 """Create a matcher to select files by predfn(fctx) at the current
575 """Create a matcher to select files by predfn(fctx) at the current
575 revision
576 revision
576
577
577 Missing files are ignored.
578 Missing files are ignored.
578 """
579 """
579 ctx = self.ctx
580 ctx = self.ctx
580 if ctx.rev() is None:
581 if ctx.rev() is None:
581
582
582 def fctxpredfn(f):
583 def fctxpredfn(f):
583 try:
584 try:
584 fctx = ctx[f]
585 fctx = ctx[f]
585 except error.LookupError:
586 except error.LookupError:
586 return False
587 return False
587 try:
588 try:
588 fctx.audit()
589 fctx.audit()
589 except error.Abort:
590 except error.Abort:
590 return False
591 return False
591 try:
592 try:
592 return predfn(fctx)
593 return predfn(fctx)
593 except (IOError, OSError) as e:
594 except (IOError, OSError) as e:
594 # open()-ing a directory fails with EACCES on Windows
595 # open()-ing a directory fails with EACCES on Windows
595 if e.errno in (
596 if e.errno in (
596 errno.ENOENT,
597 errno.ENOENT,
597 errno.EACCES,
598 errno.EACCES,
598 errno.ENOTDIR,
599 errno.ENOTDIR,
599 errno.EISDIR,
600 errno.EISDIR,
600 ):
601 ):
601 return False
602 return False
602 raise
603 raise
603
604
604 else:
605 else:
605
606
606 def fctxpredfn(f):
607 def fctxpredfn(f):
607 try:
608 try:
608 fctx = ctx[f]
609 fctx = ctx[f]
609 except error.LookupError:
610 except error.LookupError:
610 return False
611 return False
611 return predfn(fctx)
612 return predfn(fctx)
612
613
613 return self.predicate(fctxpredfn, predrepr=predrepr, cache=cache)
614 return self.predicate(fctxpredfn, predrepr=predrepr, cache=cache)
614
615
615 def never(self):
616 def never(self):
616 """Create a matcher to select nothing"""
617 """Create a matcher to select nothing"""
617 return matchmod.never(badfn=self._badfn)
618 return matchmod.never(badfn=self._badfn)
618
619
619
620
620 def match(ctx, expr, badfn=None):
621 def match(ctx, cwd, expr, badfn=None):
621 """Create a matcher for a single fileset expression"""
622 """Create a matcher for a single fileset expression"""
622 tree = filesetlang.parse(expr)
623 tree = filesetlang.parse(expr)
623 tree = filesetlang.analyze(tree)
624 tree = filesetlang.analyze(tree)
624 tree = filesetlang.optimize(tree)
625 tree = filesetlang.optimize(tree)
625 mctx = matchctx(ctx.p1(), ctx, badfn=badfn)
626 mctx = matchctx(ctx.p1(), ctx, cwd, badfn=badfn)
626 return getmatch(mctx, tree)
627 return getmatch(mctx, tree)
627
628
628
629
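
A hedged usage sketch of the new signature, assuming an already-open repo
object; passing b'' as cwd resolves relative patterns against the repository
root, and ctx.walk() is the usual way to enumerate matched files:

ctx = repo[None]  # working-directory context
m = match(ctx, b'', b"size('>100k')")
large = [f for f in ctx.walk(m)]
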
629 def loadpredicate(ui, extname, registrarobj):
630 def loadpredicate(ui, extname, registrarobj):
630 """Load fileset predicates from specified registrarobj
631 """Load fileset predicates from specified registrarobj
631 """
632 """
632 for name, func in pycompat.iteritems(registrarobj._table):
633 for name, func in pycompat.iteritems(registrarobj._table):
633 symbols[name] = func
634 symbols[name] = func
634
635
635
636
636 # tell hggettext to extract docstrings from these functions:
637 # tell hggettext to extract docstrings from these functions:
637 i18nfunctions = symbols.values()
638 i18nfunctions = symbols.values()
@@ -1,1619 +1,1622 b''
1 # match.py - filename matching
1 # match.py - filename matching
2 #
2 #
3 # Copyright 2008, 2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2008, 2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import, print_function
8 from __future__ import absolute_import, print_function
9
9
10 import copy
10 import copy
11 import itertools
11 import itertools
12 import os
12 import os
13 import re
13 import re
14
14
15 from .i18n import _
15 from .i18n import _
16 from .pycompat import open
16 from .pycompat import open
17 from . import (
17 from . import (
18 encoding,
18 encoding,
19 error,
19 error,
20 pathutil,
20 pathutil,
21 policy,
21 policy,
22 pycompat,
22 pycompat,
23 util,
23 util,
24 )
24 )
25 from .utils import stringutil
25 from .utils import stringutil
26
26
27 rustmod = policy.importrust('filepatterns')
27 rustmod = policy.importrust('filepatterns')
28
28
29 allpatternkinds = (
29 allpatternkinds = (
30 b're',
30 b're',
31 b'glob',
31 b'glob',
32 b'path',
32 b'path',
33 b'relglob',
33 b'relglob',
34 b'relpath',
34 b'relpath',
35 b'relre',
35 b'relre',
36 b'rootglob',
36 b'rootglob',
37 b'listfile',
37 b'listfile',
38 b'listfile0',
38 b'listfile0',
39 b'set',
39 b'set',
40 b'include',
40 b'include',
41 b'subinclude',
41 b'subinclude',
42 b'rootfilesin',
42 b'rootfilesin',
43 )
43 )
44 cwdrelativepatternkinds = (b'relpath', b'glob')
44 cwdrelativepatternkinds = (b'relpath', b'glob')
45
45
46 propertycache = util.propertycache
46 propertycache = util.propertycache
47
47
48
48
49 def _rematcher(regex):
49 def _rematcher(regex):
50 '''compile the regexp with the best available regexp engine and return a
50 '''compile the regexp with the best available regexp engine and return a
51 matcher function'''
51 matcher function'''
52 m = util.re.compile(regex)
52 m = util.re.compile(regex)
53 try:
53 try:
54 # slightly faster, provided by facebook's re2 bindings
54 # slightly faster, provided by facebook's re2 bindings
55 return m.test_match
55 return m.test_match
56 except AttributeError:
56 except AttributeError:
57 return m.match
57 return m.match
58
58
59
59
60 def _expandsets(kindpats, ctx=None, listsubrepos=False, badfn=None):
60 def _expandsets(cwd, kindpats, ctx=None, listsubrepos=False, badfn=None):
61 '''Returns the kindpats list with the 'set' patterns expanded to matchers'''
61 '''Returns the kindpats list with the 'set' patterns expanded to matchers'''
62 matchers = []
62 matchers = []
63 other = []
63 other = []
64
64
65 for kind, pat, source in kindpats:
65 for kind, pat, source in kindpats:
66 if kind == b'set':
66 if kind == b'set':
67 if ctx is None:
67 if ctx is None:
68 raise error.ProgrammingError(
68 raise error.ProgrammingError(
69 b"fileset expression with no context"
69 b"fileset expression with no context"
70 )
70 )
71 matchers.append(ctx.matchfileset(pat, badfn=badfn))
71 matchers.append(ctx.matchfileset(cwd, pat, badfn=badfn))
72
72
73 if listsubrepos:
73 if listsubrepos:
74 for subpath in ctx.substate:
74 for subpath in ctx.substate:
75 sm = ctx.sub(subpath).matchfileset(pat, badfn=badfn)
75 sm = ctx.sub(subpath).matchfileset(cwd, pat, badfn=badfn)
76 pm = prefixdirmatcher(subpath, sm, badfn=badfn)
76 pm = prefixdirmatcher(subpath, sm, badfn=badfn)
77 matchers.append(pm)
77 matchers.append(pm)
78
78
79 continue
79 continue
80 other.append((kind, pat, source))
80 other.append((kind, pat, source))
81 return matchers, other
81 return matchers, other
82
82
83
83
84 def _expandsubinclude(kindpats, root):
84 def _expandsubinclude(kindpats, root):
85 '''Returns the list of subinclude matcher args and the kindpats without the
85 '''Returns the list of subinclude matcher args and the kindpats without the
86 subincludes in it.'''
86 subincludes in it.'''
87 relmatchers = []
87 relmatchers = []
88 other = []
88 other = []
89
89
90 for kind, pat, source in kindpats:
90 for kind, pat, source in kindpats:
91 if kind == b'subinclude':
91 if kind == b'subinclude':
92 sourceroot = pathutil.dirname(util.normpath(source))
92 sourceroot = pathutil.dirname(util.normpath(source))
93 pat = util.pconvert(pat)
93 pat = util.pconvert(pat)
94 path = pathutil.join(sourceroot, pat)
94 path = pathutil.join(sourceroot, pat)
95
95
96 newroot = pathutil.dirname(path)
96 newroot = pathutil.dirname(path)
97 matcherargs = (newroot, b'', [], [b'include:%s' % path])
97 matcherargs = (newroot, b'', [], [b'include:%s' % path])
98
98
99 prefix = pathutil.canonpath(root, root, newroot)
99 prefix = pathutil.canonpath(root, root, newroot)
100 if prefix:
100 if prefix:
101 prefix += b'/'
101 prefix += b'/'
102 relmatchers.append((prefix, matcherargs))
102 relmatchers.append((prefix, matcherargs))
103 else:
103 else:
104 other.append((kind, pat, source))
104 other.append((kind, pat, source))
105
105
106 return relmatchers, other
106 return relmatchers, other
107
107
108
108
109 def _kindpatsalwaysmatch(kindpats):
109 def _kindpatsalwaysmatch(kindpats):
110 """Checks whether the kindpats match everything, as e.g.
110 """Checks whether the kindpats match everything, as e.g.
111 'relpath:.' does.
111 'relpath:.' does.
112 """
112 """
113 for kind, pat, source in kindpats:
113 for kind, pat, source in kindpats:
114 if pat != b'' or kind not in [b'relpath', b'glob']:
114 if pat != b'' or kind not in [b'relpath', b'glob']:
115 return False
115 return False
116 return True
116 return True
117
117
118
118
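
This helper is what lets match() below short-circuit to an alwaysmatcher,
e.g. for a bare '.' given from the repository root, which normalizes to an
empty relpath pattern. A quick check of the logic:

assert _kindpatsalwaysmatch([(b'relpath', b'', b'')])
assert not _kindpatsalwaysmatch([(b'glob', b'*.c', b'')])
assert not _kindpatsalwaysmatch([(b'relpath', b'', b''), (b're', b'.*', b'')])
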
119 def _buildkindpatsmatcher(
119 def _buildkindpatsmatcher(
120 matchercls, root, kindpats, ctx=None, listsubrepos=False, badfn=None
120 matchercls, root, cwd, kindpats, ctx=None, listsubrepos=False, badfn=None,
121 ):
121 ):
122 matchers = []
122 matchers = []
123 fms, kindpats = _expandsets(
123 fms, kindpats = _expandsets(
124 kindpats, ctx=ctx, listsubrepos=listsubrepos, badfn=badfn
124 cwd, kindpats, ctx=ctx, listsubrepos=listsubrepos, badfn=badfn,
125 )
125 )
126 if kindpats:
126 if kindpats:
127 m = matchercls(root, kindpats, badfn=badfn)
127 m = matchercls(root, kindpats, badfn=badfn)
128 matchers.append(m)
128 matchers.append(m)
129 if fms:
129 if fms:
130 matchers.extend(fms)
130 matchers.extend(fms)
131 if not matchers:
131 if not matchers:
132 return nevermatcher(badfn=badfn)
132 return nevermatcher(badfn=badfn)
133 if len(matchers) == 1:
133 if len(matchers) == 1:
134 return matchers[0]
134 return matchers[0]
135 return unionmatcher(matchers)
135 return unionmatcher(matchers)
136
136
137
137
138 def match(
138 def match(
139 root,
139 root,
140 cwd,
140 cwd,
141 patterns=None,
141 patterns=None,
142 include=None,
142 include=None,
143 exclude=None,
143 exclude=None,
144 default=b'glob',
144 default=b'glob',
145 auditor=None,
145 auditor=None,
146 ctx=None,
146 ctx=None,
147 listsubrepos=False,
147 listsubrepos=False,
148 warn=None,
148 warn=None,
149 badfn=None,
149 badfn=None,
150 icasefs=False,
150 icasefs=False,
151 ):
151 ):
152 r"""build an object to match a set of file patterns
152 r"""build an object to match a set of file patterns
153
153
154 arguments:
154 arguments:
155 root - the canonical root of the tree you're matching against
155 root - the canonical root of the tree you're matching against
156 cwd - the current working directory, if relevant
156 cwd - the current working directory, if relevant
157 patterns - patterns to find
157 patterns - patterns to find
158 include - patterns to include (unless they are excluded)
158 include - patterns to include (unless they are excluded)
159 exclude - patterns to exclude (even if they are included)
159 exclude - patterns to exclude (even if they are included)
160 default - if a pattern in patterns has no explicit type, assume this one
160 default - if a pattern in patterns has no explicit type, assume this one
161 auditor - optional path auditor
161 auditor - optional path auditor
162 ctx - optional changecontext
162 ctx - optional changecontext
163 listsubrepos - if True, recurse into subrepositories
163 listsubrepos - if True, recurse into subrepositories
164 warn - optional function used for printing warnings
164 warn - optional function used for printing warnings
165 badfn - optional bad() callback for this matcher instead of the default
165 badfn - optional bad() callback for this matcher instead of the default
166 icasefs - make a matcher for wdir on case insensitive filesystems, which
166 icasefs - make a matcher for wdir on case insensitive filesystems, which
167 normalizes the given patterns to the case in the filesystem
167 normalizes the given patterns to the case in the filesystem
168
168
169 a pattern is one of:
169 a pattern is one of:
170 'glob:<glob>' - a glob relative to cwd
170 'glob:<glob>' - a glob relative to cwd
171 're:<regexp>' - a regular expression
171 're:<regexp>' - a regular expression
172 'path:<path>' - a path relative to repository root, which is matched
172 'path:<path>' - a path relative to repository root, which is matched
173 recursively
173 recursively
174 'rootfilesin:<path>' - a path relative to repository root, which is
174 'rootfilesin:<path>' - a path relative to repository root, which is
175 matched non-recursively (will not match subdirectories)
175 matched non-recursively (will not match subdirectories)
176 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
176 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
177 'relpath:<path>' - a path relative to cwd
177 'relpath:<path>' - a path relative to cwd
178 'relre:<regexp>' - a regexp that needn't match the start of a name
178 'relre:<regexp>' - a regexp that needn't match the start of a name
179 'set:<fileset>' - a fileset expression
179 'set:<fileset>' - a fileset expression
180 'include:<path>' - a file of patterns to read and include
180 'include:<path>' - a file of patterns to read and include
181 'subinclude:<path>' - a file of patterns to match against files under
181 'subinclude:<path>' - a file of patterns to match against files under
182 the same directory
182 the same directory
183 '<something>' - a pattern of the specified default type
183 '<something>' - a pattern of the specified default type
184
184
185 >>> def _match(root, *args, **kwargs):
185 >>> def _match(root, *args, **kwargs):
186 ... return match(util.localpath(root), *args, **kwargs)
186 ... return match(util.localpath(root), *args, **kwargs)
187
187
188 Usually a patternmatcher is returned:
188 Usually a patternmatcher is returned:
189 >>> _match(b'/foo', b'.', [b're:.*\.c$', b'path:foo/a', b'*.py'])
189 >>> _match(b'/foo', b'.', [b're:.*\.c$', b'path:foo/a', b'*.py'])
190 <patternmatcher patterns='.*\\.c$|foo/a(?:/|$)|[^/]*\\.py$'>
190 <patternmatcher patterns='.*\\.c$|foo/a(?:/|$)|[^/]*\\.py$'>
191
191
192 Combining 'patterns' with 'include' (resp. 'exclude') gives an
192 Combining 'patterns' with 'include' (resp. 'exclude') gives an
193 intersectionmatcher (resp. a differencematcher):
193 intersectionmatcher (resp. a differencematcher):
194 >>> type(_match(b'/foo', b'.', [b're:.*\.c$'], include=[b'path:lib']))
194 >>> type(_match(b'/foo', b'.', [b're:.*\.c$'], include=[b'path:lib']))
195 <class 'mercurial.match.intersectionmatcher'>
195 <class 'mercurial.match.intersectionmatcher'>
196 >>> type(_match(b'/foo', b'.', [b're:.*\.c$'], exclude=[b'path:build']))
196 >>> type(_match(b'/foo', b'.', [b're:.*\.c$'], exclude=[b'path:build']))
197 <class 'mercurial.match.differencematcher'>
197 <class 'mercurial.match.differencematcher'>
198
198
199 Notice that, if 'patterns' is empty, an alwaysmatcher is returned:
199 Notice that, if 'patterns' is empty, an alwaysmatcher is returned:
200 >>> _match(b'/foo', b'.', [])
200 >>> _match(b'/foo', b'.', [])
201 <alwaysmatcher>
201 <alwaysmatcher>
202
202
203 The 'default' argument determines which kind of pattern is assumed if a
203 The 'default' argument determines which kind of pattern is assumed if a
204 pattern has no prefix:
204 pattern has no prefix:
205 >>> _match(b'/foo', b'.', [b'.*\.c$'], default=b're')
205 >>> _match(b'/foo', b'.', [b'.*\.c$'], default=b're')
206 <patternmatcher patterns='.*\\.c$'>
206 <patternmatcher patterns='.*\\.c$'>
207 >>> _match(b'/foo', b'.', [b'main.py'], default=b'relpath')
207 >>> _match(b'/foo', b'.', [b'main.py'], default=b'relpath')
208 <patternmatcher patterns='main\\.py(?:/|$)'>
208 <patternmatcher patterns='main\\.py(?:/|$)'>
209 >>> _match(b'/foo', b'.', [b'main.py'], default=b're')
209 >>> _match(b'/foo', b'.', [b'main.py'], default=b're')
210 <patternmatcher patterns='main.py'>
210 <patternmatcher patterns='main.py'>
211
211
212 The primary use of matchers is to check whether a value (usually a file
212 The primary use of matchers is to check whether a value (usually a file
213 name) matches against one of the patterns given at initialization. There
213 name) matches against one of the patterns given at initialization. There
214 are two ways of doing this check.
214 are two ways of doing this check.
215
215
216 >>> m = _match(b'/foo', b'', [b're:.*\.c$', b'relpath:a'])
216 >>> m = _match(b'/foo', b'', [b're:.*\.c$', b'relpath:a'])
217
217
218 1. Calling the matcher with a file name returns True if any pattern
218 1. Calling the matcher with a file name returns True if any pattern
219 matches that file name:
219 matches that file name:
220 >>> m(b'a')
220 >>> m(b'a')
221 True
221 True
222 >>> m(b'main.c')
222 >>> m(b'main.c')
223 True
223 True
224 >>> m(b'test.py')
224 >>> m(b'test.py')
225 False
225 False
226
226
227 2. Using the exact() method only returns True if the file name matches one
227 2. Using the exact() method only returns True if the file name matches one
228 of the exact patterns (i.e. not re: or glob: patterns):
228 of the exact patterns (i.e. not re: or glob: patterns):
229 >>> m.exact(b'a')
229 >>> m.exact(b'a')
230 True
230 True
231 >>> m.exact(b'main.c')
231 >>> m.exact(b'main.c')
232 False
232 False
233 """
233 """
234 assert os.path.isabs(root)
234 assert os.path.isabs(root)
235 cwd = os.path.join(root, util.localpath(cwd))
235 cwd = os.path.join(root, util.localpath(cwd))
236 normalize = _donormalize
236 normalize = _donormalize
237 if icasefs:
237 if icasefs:
238 dirstate = ctx.repo().dirstate
238 dirstate = ctx.repo().dirstate
239 dsnormalize = dirstate.normalize
239 dsnormalize = dirstate.normalize
240
240
241 def normalize(patterns, default, root, cwd, auditor, warn):
241 def normalize(patterns, default, root, cwd, auditor, warn):
242 kp = _donormalize(patterns, default, root, cwd, auditor, warn)
242 kp = _donormalize(patterns, default, root, cwd, auditor, warn)
243 kindpats = []
243 kindpats = []
244 for kind, pats, source in kp:
244 for kind, pats, source in kp:
245 if kind not in (b're', b'relre'): # regex can't be normalized
245 if kind not in (b're', b'relre'): # regex can't be normalized
246 p = pats
246 p = pats
247 pats = dsnormalize(pats)
247 pats = dsnormalize(pats)
248
248
249 # Preserve the original to handle a case only rename.
249 # Preserve the original to handle a case only rename.
250 if p != pats and p in dirstate:
250 if p != pats and p in dirstate:
251 kindpats.append((kind, p, source))
251 kindpats.append((kind, p, source))
252
252
253 kindpats.append((kind, pats, source))
253 kindpats.append((kind, pats, source))
254 return kindpats
254 return kindpats
255
255
256 if patterns:
256 if patterns:
257 kindpats = normalize(patterns, default, root, cwd, auditor, warn)
257 kindpats = normalize(patterns, default, root, cwd, auditor, warn)
258 if _kindpatsalwaysmatch(kindpats):
258 if _kindpatsalwaysmatch(kindpats):
259 m = alwaysmatcher(badfn)
259 m = alwaysmatcher(badfn)
260 else:
260 else:
261 m = _buildkindpatsmatcher(
261 m = _buildkindpatsmatcher(
262 patternmatcher,
262 patternmatcher,
263 root,
263 root,
264 cwd,
264 kindpats,
265 kindpats,
265 ctx=ctx,
266 ctx=ctx,
266 listsubrepos=listsubrepos,
267 listsubrepos=listsubrepos,
267 badfn=badfn,
268 badfn=badfn,
268 )
269 )
269 else:
270 else:
270 # It's a little strange that no patterns means to match everything.
271 # It's a little strange that no patterns means to match everything.
271 # Consider changing this to match nothing (probably using nevermatcher).
272 # Consider changing this to match nothing (probably using nevermatcher).
272 m = alwaysmatcher(badfn)
273 m = alwaysmatcher(badfn)
273
274
274 if include:
275 if include:
275 kindpats = normalize(include, b'glob', root, cwd, auditor, warn)
276 kindpats = normalize(include, b'glob', root, cwd, auditor, warn)
276 im = _buildkindpatsmatcher(
277 im = _buildkindpatsmatcher(
277 includematcher,
278 includematcher,
278 root,
279 root,
280 cwd,
279 kindpats,
281 kindpats,
280 ctx=ctx,
282 ctx=ctx,
281 listsubrepos=listsubrepos,
283 listsubrepos=listsubrepos,
282 badfn=None,
284 badfn=None,
283 )
285 )
284 m = intersectmatchers(m, im)
286 m = intersectmatchers(m, im)
285 if exclude:
287 if exclude:
286 kindpats = normalize(exclude, b'glob', root, cwd, auditor, warn)
288 kindpats = normalize(exclude, b'glob', root, cwd, auditor, warn)
287 em = _buildkindpatsmatcher(
289 em = _buildkindpatsmatcher(
288 includematcher,
290 includematcher,
289 root,
291 root,
292 cwd,
290 kindpats,
293 kindpats,
291 ctx=ctx,
294 ctx=ctx,
292 listsubrepos=listsubrepos,
295 listsubrepos=listsubrepos,
293 badfn=None,
296 badfn=None,
294 )
297 )
295 m = differencematcher(m, em)
298 m = differencematcher(m, em)
296 return m
299 return m
297
300
298
301
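
A hedged sketch of the calling convention this patch establishes: cwd is an
explicit argument (a path under root), so 'relpath:' and 'glob:' patterns
resolve against it rather than against the process's working directory. On a
POSIX system, roughly:

m = match(b'/repo', b'src', [b'relpath:main.c'])
assert m(b'src/main.c')  # resolved against the passed cwd
assert not m(b'main.c')
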
299 def exact(files, badfn=None):
302 def exact(files, badfn=None):
300 return exactmatcher(files, badfn=badfn)
303 return exactmatcher(files, badfn=badfn)
301
304
302
305
303 def always(badfn=None):
306 def always(badfn=None):
304 return alwaysmatcher(badfn)
307 return alwaysmatcher(badfn)
305
308
306
309
307 def never(badfn=None):
310 def never(badfn=None):
308 return nevermatcher(badfn)
311 return nevermatcher(badfn)
309
312
310
313
311 def badmatch(match, badfn):
314 def badmatch(match, badfn):
312 """Make a copy of the given matcher, replacing its bad method with the given
315 """Make a copy of the given matcher, replacing its bad method with the given
313 one.
316 one.
314 """
317 """
315 m = copy.copy(match)
318 m = copy.copy(match)
316 m.bad = badfn
319 m.bad = badfn
317 return m
320 return m
318
321
319
322
320 def _donormalize(patterns, default, root, cwd, auditor=None, warn=None):
323 def _donormalize(patterns, default, root, cwd, auditor=None, warn=None):
321 '''Convert 'kind:pat' from the patterns list to tuples with kind and
321 '''Convert 'kind:pat' from the patterns list to tuples of kind and
324 '''Convert 'kind:pat' from the patterns list to tuples of kind and
322 normalized, rooted pattern, with listfile patterns expanded.'''
325 normalized, rooted pattern, with listfile patterns expanded.'''
326 kindpats = []
324 for kind, pat in [_patsplit(p, default) for p in patterns]:
327 for kind, pat in [_patsplit(p, default) for p in patterns]:
325 if kind in cwdrelativepatternkinds:
328 if kind in cwdrelativepatternkinds:
326 pat = pathutil.canonpath(root, cwd, pat, auditor=auditor)
329 pat = pathutil.canonpath(root, cwd, pat, auditor=auditor)
327 elif kind in (b'relglob', b'path', b'rootfilesin', b'rootglob'):
330 elif kind in (b'relglob', b'path', b'rootfilesin', b'rootglob'):
328 pat = util.normpath(pat)
331 pat = util.normpath(pat)
329 elif kind in (b'listfile', b'listfile0'):
332 elif kind in (b'listfile', b'listfile0'):
330 try:
333 try:
331 files = util.readfile(pat)
334 files = util.readfile(pat)
332 if kind == b'listfile0':
335 if kind == b'listfile0':
333 files = files.split(b'\0')
336 files = files.split(b'\0')
334 else:
337 else:
335 files = files.splitlines()
338 files = files.splitlines()
336 files = [f for f in files if f]
339 files = [f for f in files if f]
337 except EnvironmentError:
340 except EnvironmentError:
338 raise error.Abort(_(b"unable to read file list (%s)") % pat)
341 raise error.Abort(_(b"unable to read file list (%s)") % pat)
339 for k, p, source in _donormalize(
342 for k, p, source in _donormalize(
340 files, default, root, cwd, auditor, warn
343 files, default, root, cwd, auditor, warn
341 ):
344 ):
342 kindpats.append((k, p, pat))
345 kindpats.append((k, p, pat))
343 continue
346 continue
344 elif kind == b'include':
347 elif kind == b'include':
345 try:
348 try:
346 fullpath = os.path.join(root, util.localpath(pat))
349 fullpath = os.path.join(root, util.localpath(pat))
347 includepats = readpatternfile(fullpath, warn)
350 includepats = readpatternfile(fullpath, warn)
348 for k, p, source in _donormalize(
351 for k, p, source in _donormalize(
349 includepats, default, root, cwd, auditor, warn
352 includepats, default, root, cwd, auditor, warn
350 ):
353 ):
351 kindpats.append((k, p, source or pat))
354 kindpats.append((k, p, source or pat))
352 except error.Abort as inst:
355 except error.Abort as inst:
353 raise error.Abort(
356 raise error.Abort(
354 b'%s: %s'
357 b'%s: %s'
355 % (pat, inst[0]) # pytype: disable=unsupported-operands
358 % (pat, inst[0]) # pytype: disable=unsupported-operands
356 )
359 )
357 except IOError as inst:
360 except IOError as inst:
358 if warn:
361 if warn:
359 warn(
362 warn(
360 _(b"skipping unreadable pattern file '%s': %s\n")
363 _(b"skipping unreadable pattern file '%s': %s\n")
361 % (pat, stringutil.forcebytestr(inst.strerror))
364 % (pat, stringutil.forcebytestr(inst.strerror))
362 )
365 )
363 continue
366 continue
364 # else: re or relre - which cannot be normalized
367 # else: re or relre - which cannot be normalized
365 kindpats.append((kind, pat, b''))
368 kindpats.append((kind, pat, b''))
366 return kindpats
369 return kindpats
367
370
368
371
369 class basematcher(object):
372 class basematcher(object):
370 def __init__(self, badfn=None):
373 def __init__(self, badfn=None):
371 if badfn is not None:
374 if badfn is not None:
372 self.bad = badfn
375 self.bad = badfn
373
376
374 def __call__(self, fn):
377 def __call__(self, fn):
375 return self.matchfn(fn)
378 return self.matchfn(fn)
376
379
377 # Callbacks related to how the matcher is used by dirstate.walk.
380 # Callbacks related to how the matcher is used by dirstate.walk.
378 # Subscribers to these events must monkeypatch the matcher object.
381 # Subscribers to these events must monkeypatch the matcher object.
379 def bad(self, f, msg):
382 def bad(self, f, msg):
380 '''Callback from dirstate.walk for each explicit file that can't be
383 '''Callback from dirstate.walk for each explicit file that can't be
381 found/accessed, with an error message.'''
384 found/accessed, with an error message.'''
382
385
383 # If traversedir is set, it will be called when a directory discovered
386 # If traversedir is set, it will be called when a directory discovered
384 # by recursive traversal is visited.
387 # by recursive traversal is visited.
385 traversedir = None
388 traversedir = None
386
389
387 @propertycache
390 @propertycache
388 def _files(self):
391 def _files(self):
389 return []
392 return []
390
393
391 def files(self):
394 def files(self):
392 '''Explicitly listed files or patterns or roots:
395 '''Explicitly listed files or patterns or roots:
393 if no patterns or .always(): empty list,
396 if no patterns or .always(): empty list,
394 if exact: list exact files,
397 if exact: list exact files,
395 if not .anypats(): list all files and dirs,
398 if not .anypats(): list all files and dirs,
396 else: optimal roots'''
399 else: optimal roots'''
397 return self._files
400 return self._files
398
401
399 @propertycache
402 @propertycache
400 def _fileset(self):
403 def _fileset(self):
401 return set(self._files)
404 return set(self._files)
402
405
403 def exact(self, f):
406 def exact(self, f):
404 '''Returns True if f is in .files().'''
407 '''Returns True if f is in .files().'''
405 return f in self._fileset
408 return f in self._fileset
406
409
407 def matchfn(self, f):
410 def matchfn(self, f):
408 return False
411 return False
409
412
410 def visitdir(self, dir):
413 def visitdir(self, dir):
411 '''Decides whether a directory should be visited based on whether it
414 '''Decides whether a directory should be visited based on whether it
412 has potential matches in it or one of its subdirectories. This is
415 has potential matches in it or one of its subdirectories. This is
413 based on the match's primary, included, and excluded patterns.
416 based on the match's primary, included, and excluded patterns.
414
417
415 Returns the string 'all' if the given directory and all subdirectories
418 Returns the string 'all' if the given directory and all subdirectories
416 should be visited. Otherwise returns True or False indicating whether
419 should be visited. Otherwise returns True or False indicating whether
417 the given directory should be visited.
420 the given directory should be visited.
418 '''
421 '''
419 return True
422 return True
420
423
421 def visitchildrenset(self, dir):
424 def visitchildrenset(self, dir):
422 '''Decides whether a directory should be visited based on whether it
425 '''Decides whether a directory should be visited based on whether it
423 has potential matches in it or one of its subdirectories, and
426 has potential matches in it or one of its subdirectories, and
424 potentially lists which subdirectories of that directory should be
427 potentially lists which subdirectories of that directory should be
425 visited. This is based on the match's primary, included, and excluded
428 visited. This is based on the match's primary, included, and excluded
426 patterns.
429 patterns.
427
430
428 This function is very similar to 'visitdir', and the following mapping
431 This function is very similar to 'visitdir', and the following mapping
429 can be applied:
432 can be applied:
430
433
431 visitdir | visitchildrenset
434 visitdir | visitchildrenset
432 ----------+-------------------
435 ----------+-------------------
433 False | set()
436 False | set()
434 'all' | 'all'
437 'all' | 'all'
435 True | 'this' OR non-empty set of subdirs -or files- to visit
438 True | 'this' OR non-empty set of subdirs -or files- to visit
436
439
437 Example:
440 Example:
438 Given matchers ['path:foo/bar', 'rootfilesin:qux'], we would return
441 Given matchers ['path:foo/bar', 'rootfilesin:qux'], we would return
439 the following values (assuming the implementation of visitchildrenset
442 the following values (assuming the implementation of visitchildrenset
440 is capable of recognizing this; some implementations are not).
443 is capable of recognizing this; some implementations are not).
441
444
442 '' -> {'foo', 'qux'}
445 '' -> {'foo', 'qux'}
443 'baz' -> set()
446 'baz' -> set()
444 'foo' -> {'bar'}
447 'foo' -> {'bar'}
445 # Ideally this would be 'all', but since the prefix nature of matchers
448 # Ideally this would be 'all', but since the prefix nature of matchers
446 # is applied to the entire matcher, we have to downgrade this to
449 # is applied to the entire matcher, we have to downgrade this to
447 # 'this' due to the non-prefix 'rootfilesin'-kind matcher being mixed
450 # 'this' due to the non-prefix 'rootfilesin'-kind matcher being mixed
448 # in.
451 # in.
449 'foo/bar' -> 'this'
452 'foo/bar' -> 'this'
450 'qux' -> 'this'
453 'qux' -> 'this'
451
454
452 Important:
455 Important:
453 Most matchers do not know if they're representing files or
456 Most matchers do not know if they're representing files or
454 directories. They see ['path:dir/f'] and don't know whether 'f' is a
457 directories. They see ['path:dir/f'] and don't know whether 'f' is a
455 file or a directory, so visitchildrenset('dir') for most matchers will
458 file or a directory, so visitchildrenset('dir') for most matchers will
456 return {'f'}, but if the matcher knows it's a file (like exactmatcher
459 return {'f'}, but if the matcher knows it's a file (like exactmatcher
457 does), it may return 'this'. Do not rely on the return being a set
460 does), it may return 'this'. Do not rely on the return being a set
458 indicating that there are no files in this dir to investigate (or
461 indicating that there are no files in this dir to investigate (or
459 equivalently that if there are files to investigate in 'dir' that it
462 equivalently that if there are files to investigate in 'dir' that it
460 will always return 'this').
463 will always return 'this').
461 '''
464 '''
462 return b'this'
465 return b'this'
463
466
464 def always(self):
467 def always(self):
465 '''Matcher will match everything and .files() will be empty --
468 '''Matcher will match everything and .files() will be empty --
466 optimization might be possible.'''
469 optimization might be possible.'''
467 return False
470 return False
468
471
469 def isexact(self):
472 def isexact(self):
470 '''Matcher will match exactly the list of files in .files() --
473 '''Matcher will match exactly the list of files in .files() --
471 optimization might be possible.'''
474 optimization might be possible.'''
472 return False
475 return False
473
476
474 def prefix(self):
477 def prefix(self):
475 '''Matcher will match the paths in .files() recursively --
478 '''Matcher will match the paths in .files() recursively --
476 optimization might be possible.'''
479 optimization might be possible.'''
477 return False
480 return False
478
481
479 def anypats(self):
482 def anypats(self):
480 '''None of .always(), .isexact(), and .prefix() is true --
483 '''None of .always(), .isexact(), and .prefix() is true --
481 optimizations will be difficult.'''
484 optimizations will be difficult.'''
482 return not self.always() and not self.isexact() and not self.prefix()
485 return not self.always() and not self.isexact() and not self.prefix()
483
486
484
487
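
To make the protocol above concrete, a minimal custom matcher (illustrative
only, not part of this patch): matchfn is the only required override, and
the visitdir/visitchildrenset defaults inherited from basematcher stay
conservatively permissive.

class suffixmatcher(basematcher):
    '''Match files ending with a given byte-string suffix.'''

    def __init__(self, suffix, badfn=None):
        super(suffixmatcher, self).__init__(badfn)
        self._suffix = suffix

    def matchfn(self, f):
        return f.endswith(self._suffix)

m = suffixmatcher(b'.py')
assert m(b'setup.py') and not m(b'README')
assert m.visitdir(b'src')  # inherited default: always worth visiting
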
485 class alwaysmatcher(basematcher):
488 class alwaysmatcher(basematcher):
486 '''Matches everything.'''
489 '''Matches everything.'''
487
490
488 def __init__(self, badfn=None):
491 def __init__(self, badfn=None):
489 super(alwaysmatcher, self).__init__(badfn)
492 super(alwaysmatcher, self).__init__(badfn)
490
493
491 def always(self):
494 def always(self):
492 return True
495 return True
493
496
494 def matchfn(self, f):
497 def matchfn(self, f):
495 return True
498 return True
496
499
497 def visitdir(self, dir):
500 def visitdir(self, dir):
498 return b'all'
501 return b'all'
499
502
500 def visitchildrenset(self, dir):
503 def visitchildrenset(self, dir):
501 return b'all'
504 return b'all'
502
505
503 def __repr__(self):
506 def __repr__(self):
504 return r'<alwaysmatcher>'
507 return r'<alwaysmatcher>'
505
508
506
509
507 class nevermatcher(basematcher):
510 class nevermatcher(basematcher):
508 '''Matches nothing.'''
511 '''Matches nothing.'''
509
512
510 def __init__(self, badfn=None):
513 def __init__(self, badfn=None):
511 super(nevermatcher, self).__init__(badfn)
514 super(nevermatcher, self).__init__(badfn)
512
515
513 # It's a little weird to say that the nevermatcher is an exact matcher
516 # It's a little weird to say that the nevermatcher is an exact matcher
514 # or a prefix matcher, but it seems to make sense to let callers take
517 # or a prefix matcher, but it seems to make sense to let callers take
515 # fast paths based on either. There will be no exact matches, nor any
518 # fast paths based on either. There will be no exact matches, nor any
516 # prefixes (files() returns []), so fast paths iterating over them should
519 # prefixes (files() returns []), so fast paths iterating over them should
517 # be efficient (and correct).
520 # be efficient (and correct).
518 def isexact(self):
521 def isexact(self):
519 return True
522 return True
520
523
521 def prefix(self):
524 def prefix(self):
522 return True
525 return True
523
526
524 def visitdir(self, dir):
527 def visitdir(self, dir):
525 return False
528 return False
526
529
527 def visitchildrenset(self, dir):
530 def visitchildrenset(self, dir):
528 return set()
531 return set()
529
532
530 def __repr__(self):
533 def __repr__(self):
531 return r'<nevermatcher>'
534 return r'<nevermatcher>'
532
535
533
536
534 class predicatematcher(basematcher):
537 class predicatematcher(basematcher):
535 """A matcher adapter for a simple boolean function"""
538 """A matcher adapter for a simple boolean function"""
536
539
537 def __init__(self, predfn, predrepr=None, badfn=None):
540 def __init__(self, predfn, predrepr=None, badfn=None):
538 super(predicatematcher, self).__init__(badfn)
541 super(predicatematcher, self).__init__(badfn)
539 self.matchfn = predfn
542 self.matchfn = predfn
540 self._predrepr = predrepr
543 self._predrepr = predrepr
541
544
542 @encoding.strmethod
545 @encoding.strmethod
543 def __repr__(self):
546 def __repr__(self):
544 s = stringutil.buildrepr(self._predrepr) or pycompat.byterepr(
547 s = stringutil.buildrepr(self._predrepr) or pycompat.byterepr(
545 self.matchfn
548 self.matchfn
546 )
549 )
547 return b'<predicatematcher pred=%s>' % s
550 return b'<predicatematcher pred=%s>' % s
548
551
549
552
550 class patternmatcher(basematcher):
553 class patternmatcher(basematcher):
551 r"""Matches a set of (kind, pat, source) against a 'root' directory.
554 r"""Matches a set of (kind, pat, source) against a 'root' directory.
552
555
553 >>> kindpats = [
556 >>> kindpats = [
554 ... (b're', br'.*\.c$', b''),
557 ... (b're', br'.*\.c$', b''),
555 ... (b'path', b'foo/a', b''),
558 ... (b'path', b'foo/a', b''),
556 ... (b'relpath', b'b', b''),
559 ... (b'relpath', b'b', b''),
557 ... (b'glob', b'*.h', b''),
560 ... (b'glob', b'*.h', b''),
558 ... ]
561 ... ]
559 >>> m = patternmatcher(b'foo', kindpats)
562 >>> m = patternmatcher(b'foo', kindpats)
560 >>> m(b'main.c') # matches re:.*\.c$
563 >>> m(b'main.c') # matches re:.*\.c$
561 True
564 True
562 >>> m(b'b.txt')
565 >>> m(b'b.txt')
563 False
566 False
564 >>> m(b'foo/a') # matches path:foo/a
567 >>> m(b'foo/a') # matches path:foo/a
565 True
568 True
566 >>> m(b'a') # does not match path:b, since 'root' is 'foo'
569 >>> m(b'a') # does not match path:b, since 'root' is 'foo'
567 False
570 False
568 >>> m(b'b') # matches relpath:b, since 'root' is 'foo'
571 >>> m(b'b') # matches relpath:b, since 'root' is 'foo'
569 True
572 True
570 >>> m(b'lib.h') # matches glob:*.h
573 >>> m(b'lib.h') # matches glob:*.h
571 True
574 True
572
575
573 >>> m.files()
576 >>> m.files()
574 ['', 'foo/a', 'b', '']
577 ['', 'foo/a', 'b', '']
575 >>> m.exact(b'foo/a')
578 >>> m.exact(b'foo/a')
576 True
579 True
577 >>> m.exact(b'b')
580 >>> m.exact(b'b')
578 True
581 True
579 >>> m.exact(b'lib.h') # exact matches are for (rel)path kinds
582 >>> m.exact(b'lib.h') # exact matches are for (rel)path kinds
580 False
583 False
581 """
584 """
582
585
583 def __init__(self, root, kindpats, badfn=None):
586 def __init__(self, root, kindpats, badfn=None):
584 super(patternmatcher, self).__init__(badfn)
587 super(patternmatcher, self).__init__(badfn)
585
588
586 self._files = _explicitfiles(kindpats)
589 self._files = _explicitfiles(kindpats)
587 self._prefix = _prefix(kindpats)
590 self._prefix = _prefix(kindpats)
588 self._pats, self.matchfn = _buildmatch(kindpats, b'$', root)
591 self._pats, self.matchfn = _buildmatch(kindpats, b'$', root)
589
592
590 @propertycache
593 @propertycache
591 def _dirs(self):
594 def _dirs(self):
592 return set(pathutil.dirs(self._fileset))
595 return set(pathutil.dirs(self._fileset))
593
596
594 def visitdir(self, dir):
597 def visitdir(self, dir):
595 if self._prefix and dir in self._fileset:
598 if self._prefix and dir in self._fileset:
596 return b'all'
599 return b'all'
597 return (
600 return (
598 dir in self._fileset
601 dir in self._fileset
599 or dir in self._dirs
602 or dir in self._dirs
600 or any(
603 or any(
601 parentdir in self._fileset
604 parentdir in self._fileset
602 for parentdir in pathutil.finddirs(dir)
605 for parentdir in pathutil.finddirs(dir)
603 )
606 )
604 )
607 )
605
608
606 def visitchildrenset(self, dir):
609 def visitchildrenset(self, dir):
607 ret = self.visitdir(dir)
610 ret = self.visitdir(dir)
608 if ret is True:
611 if ret is True:
609 return b'this'
612 return b'this'
610 elif not ret:
613 elif not ret:
611 return set()
614 return set()
612 assert ret == b'all'
615 assert ret == b'all'
613 return b'all'
616 return b'all'
614
617
615 def prefix(self):
618 def prefix(self):
616 return self._prefix
619 return self._prefix
617
620
618 @encoding.strmethod
621 @encoding.strmethod
619 def __repr__(self):
622 def __repr__(self):
620 return b'<patternmatcher patterns=%r>' % pycompat.bytestr(self._pats)
623 return b'<patternmatcher patterns=%r>' % pycompat.bytestr(self._pats)
621
624
622
625
623 # This is basically a reimplementation of pathutil.dirs that stores the
626 # This is basically a reimplementation of pathutil.dirs that stores the
624 # children instead of just a count of them, plus a small optional optimization
627 # children instead of just a count of them, plus a small optional optimization
625 # to avoid some directories we don't need.
628 # to avoid some directories we don't need.
626 class _dirchildren(object):
629 class _dirchildren(object):
627 def __init__(self, paths, onlyinclude=None):
630 def __init__(self, paths, onlyinclude=None):
628 self._dirs = {}
631 self._dirs = {}
629 self._onlyinclude = onlyinclude or []
632 self._onlyinclude = onlyinclude or []
630 addpath = self.addpath
633 addpath = self.addpath
631 for f in paths:
634 for f in paths:
632 addpath(f)
635 addpath(f)
633
636
634 def addpath(self, path):
637 def addpath(self, path):
635 if path == b'':
638 if path == b'':
636 return
639 return
637 dirs = self._dirs
640 dirs = self._dirs
638 findsplitdirs = _dirchildren._findsplitdirs
641 findsplitdirs = _dirchildren._findsplitdirs
639 for d, b in findsplitdirs(path):
642 for d, b in findsplitdirs(path):
640 if d not in self._onlyinclude:
643 if d not in self._onlyinclude:
641 continue
644 continue
642 dirs.setdefault(d, set()).add(b)
645 dirs.setdefault(d, set()).add(b)
643
646
644 @staticmethod
647 @staticmethod
645 def _findsplitdirs(path):
648 def _findsplitdirs(path):
646 # yields (dirname, basename) tuples, walking back to the root. This is
649 # yields (dirname, basename) tuples, walking back to the root. This is
647 # very similar to pathutil.finddirs, except:
650 # very similar to pathutil.finddirs, except:
648 # - produces a (dirname, basename) tuple, not just 'dirname'
651 # - produces a (dirname, basename) tuple, not just 'dirname'
649 # Unlike manifest._splittopdir, this does not suffix `dirname` with a
652 # Unlike manifest._splittopdir, this does not suffix `dirname` with a
650 # slash.
653 # slash.
651 oldpos = len(path)
654 oldpos = len(path)
652 pos = path.rfind(b'/')
655 pos = path.rfind(b'/')
653 while pos != -1:
656 while pos != -1:
654 yield path[:pos], path[pos + 1 : oldpos]
657 yield path[:pos], path[pos + 1 : oldpos]
655 oldpos = pos
658 oldpos = pos
656 pos = path.rfind(b'/', 0, pos)
659 pos = path.rfind(b'/', 0, pos)
657 yield b'', path[:oldpos]
660 yield b'', path[:oldpos]
658
661
659 def get(self, path):
662 def get(self, path):
660 return self._dirs.get(path, set())
663 return self._dirs.get(path, set())
661
664
662
665
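
A worked example of _findsplitdirs, walking b'a/b/c' back to the root (a
standalone check; compare pathutil.finddirs, which yields only the dirname
part):

assert list(_dirchildren._findsplitdirs(b'a/b/c')) == [
    (b'a/b', b'c'),
    (b'a', b'b'),
    (b'', b'a'),
]
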
663 class includematcher(basematcher):
666 class includematcher(basematcher):
664 def __init__(self, root, kindpats, badfn=None):
667 def __init__(self, root, kindpats, badfn=None):
665 super(includematcher, self).__init__(badfn)
668 super(includematcher, self).__init__(badfn)
666
669
667 self._pats, self.matchfn = _buildmatch(kindpats, b'(?:/|$)', root)
670 self._pats, self.matchfn = _buildmatch(kindpats, b'(?:/|$)', root)
668 self._prefix = _prefix(kindpats)
671 self._prefix = _prefix(kindpats)
669 roots, dirs, parents = _rootsdirsandparents(kindpats)
672 roots, dirs, parents = _rootsdirsandparents(kindpats)
670 # roots are directories which are recursively included.
673 # roots are directories which are recursively included.
671 self._roots = set(roots)
674 self._roots = set(roots)
672 # dirs are directories which are non-recursively included.
675 # dirs are directories which are non-recursively included.
673 self._dirs = set(dirs)
676 self._dirs = set(dirs)
674 # parents are directories which are non-recursively included because
677 # parents are directories which are non-recursively included because
675 # they are needed to get to items in _dirs or _roots.
678 # they are needed to get to items in _dirs or _roots.
676 self._parents = parents
679 self._parents = parents
677
680
678 def visitdir(self, dir):
681 def visitdir(self, dir):
679 if self._prefix and dir in self._roots:
682 if self._prefix and dir in self._roots:
680 return b'all'
683 return b'all'
681 return (
684 return (
682 dir in self._roots
685 dir in self._roots
683 or dir in self._dirs
686 or dir in self._dirs
684 or dir in self._parents
687 or dir in self._parents
685 or any(
688 or any(
686 parentdir in self._roots for parentdir in pathutil.finddirs(dir)
689 parentdir in self._roots for parentdir in pathutil.finddirs(dir)
687 )
690 )
688 )
691 )
689
692
690 @propertycache
693 @propertycache
691 def _allparentschildren(self):
694 def _allparentschildren(self):
692 # It may seem odd that we add dirs, roots, and parents, and then
695 # It may seem odd that we add dirs, roots, and parents, and then
693 # restrict to only parents. This is to catch the case of:
696 # restrict to only parents. This is to catch the case of:
694 # dirs = ['foo/bar']
697 # dirs = ['foo/bar']
695 # parents = ['foo']
698 # parents = ['foo']
696 # if we asked for the children of 'foo', but had only added
699 # if we asked for the children of 'foo', but had only added
697 # self._parents, we wouldn't be able to respond ['bar'].
700 # self._parents, we wouldn't be able to respond ['bar'].
698 return _dirchildren(
701 return _dirchildren(
699 itertools.chain(self._dirs, self._roots, self._parents),
702 itertools.chain(self._dirs, self._roots, self._parents),
700 onlyinclude=self._parents,
703 onlyinclude=self._parents,
701 )
704 )
702
705
703 def visitchildrenset(self, dir):
706 def visitchildrenset(self, dir):
704 if self._prefix and dir in self._roots:
707 if self._prefix and dir in self._roots:
705 return b'all'
708 return b'all'
706 # Note: this does *not* include the 'dir in self._parents' case from
709 # Note: this does *not* include the 'dir in self._parents' case from
707 # visitdir, that's handled below.
710 # visitdir, that's handled below.
708 if (
711 if (
709 b'' in self._roots
712 b'' in self._roots
710 or dir in self._roots
713 or dir in self._roots
711 or dir in self._dirs
714 or dir in self._dirs
712 or any(
715 or any(
713 parentdir in self._roots for parentdir in pathutil.finddirs(dir)
716 parentdir in self._roots for parentdir in pathutil.finddirs(dir)
714 )
717 )
715 ):
718 ):
716 return b'this'
719 return b'this'
717
720
718 if dir in self._parents:
721 if dir in self._parents:
719 return self._allparentschildren.get(dir) or set()
722 return self._allparentschildren.get(dir) or set()
720 return set()
723 return set()
721
724
722 @encoding.strmethod
725 @encoding.strmethod
723 def __repr__(self):
726 def __repr__(self):
724 return b'<includematcher includes=%r>' % pycompat.bytestr(self._pats)
727 return b'<includematcher includes=%r>' % pycompat.bytestr(self._pats)
725
728
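Illustration (not part of this changeset): the three sets above interact as
the comments describe: roots match recursively, while dirs and parents only
match themselves. A minimal sketch in plain Python, with 'finddirs' standing
in for pathutil.finddirs and all names hypothetical:

def finddirs(path):
    # yield each parent directory of path, ending with the root b''
    pos = path.rfind(b'/')
    while pos != -1:
        yield path[:pos]
        pos = path.rfind(b'/', 0, pos)
    yield b''

def visitdir(dir, roots, dirs, parents):
    return (
        dir in roots
        or dir in dirs
        or dir in parents
        or any(parent in roots for parent in finddirs(dir))
    )

# b'a' is a root (recursive), b'b' is a dir (non-recursive), b'' is a parent.
assert visitdir(b'a/deep/subdir', {b'a'}, {b'b'}, {b''})   # under a root
assert not visitdir(b'b/subdir', {b'a'}, {b'b'}, {b''})    # below a mere dir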
726
729
727 class exactmatcher(basematcher):
730 class exactmatcher(basematcher):
728 r'''Matches the input files exactly. They are interpreted as paths, not
731 r'''Matches the input files exactly. They are interpreted as paths, not
729 patterns (so no kind-prefixes).
732 patterns (so no kind-prefixes).
730
733
731 >>> m = exactmatcher([b'a.txt', br're:.*\.c$'])
734 >>> m = exactmatcher([b'a.txt', br're:.*\.c$'])
732 >>> m(b'a.txt')
735 >>> m(b'a.txt')
733 True
736 True
734 >>> m(b'b.txt')
737 >>> m(b'b.txt')
735 False
738 False
736
739
737 Input files that would be matched are exactly those returned by .files()
740 Input files that would be matched are exactly those returned by .files()
738 >>> m.files()
741 >>> m.files()
739 ['a.txt', 're:.*\\.c$']
742 ['a.txt', 're:.*\\.c$']
740
743
741 So pattern 're:.*\.c$' is not considered as a regex, but as a file name
744 So pattern 're:.*\.c$' is not considered as a regex, but as a file name
742 >>> m(b'main.c')
745 >>> m(b'main.c')
743 False
746 False
744 >>> m(br're:.*\.c$')
747 >>> m(br're:.*\.c$')
745 True
748 True
746 '''
749 '''
747
750
748 def __init__(self, files, badfn=None):
751 def __init__(self, files, badfn=None):
749 super(exactmatcher, self).__init__(badfn)
752 super(exactmatcher, self).__init__(badfn)
750
753
751 if isinstance(files, list):
754 if isinstance(files, list):
752 self._files = files
755 self._files = files
753 else:
756 else:
754 self._files = list(files)
757 self._files = list(files)
755
758
756 matchfn = basematcher.exact
759 matchfn = basematcher.exact
757
760
758 @propertycache
761 @propertycache
759 def _dirs(self):
762 def _dirs(self):
760 return set(pathutil.dirs(self._fileset))
763 return set(pathutil.dirs(self._fileset))
761
764
762 def visitdir(self, dir):
765 def visitdir(self, dir):
763 return dir in self._dirs
766 return dir in self._dirs
764
767
765 def visitchildrenset(self, dir):
768 def visitchildrenset(self, dir):
766 if not self._fileset or dir not in self._dirs:
769 if not self._fileset or dir not in self._dirs:
767 return set()
770 return set()
768
771
769 candidates = self._fileset | self._dirs - {b''}
772 candidates = self._fileset | self._dirs - {b''}
770 if dir != b'':
773 if dir != b'':
771 d = dir + b'/'
774 d = dir + b'/'
772 candidates = set(c[len(d) :] for c in candidates if c.startswith(d))
775 candidates = set(c[len(d) :] for c in candidates if c.startswith(d))
773 # self._dirs includes all of the directories, recursively, so if
776 # self._dirs includes all of the directories, recursively, so if
774 # we're attempting to match foo/bar/baz.txt, it'll have '', 'foo',
777 # we're attempting to match foo/bar/baz.txt, it'll have '', 'foo',
775 # 'foo/bar' in it. Thus we can safely ignore a candidate that has a
778 # 'foo/bar' in it. Thus we can safely ignore a candidate that has a
776 # '/' in it, indicating it's for a subdir-of-a-subdir; the
779 # '/' in it, indicating it's for a subdir-of-a-subdir; the
777 # immediate subdir will be in there without a slash.
780 # immediate subdir will be in there without a slash.
778 ret = {c for c in candidates if b'/' not in c}
781 ret = {c for c in candidates if b'/' not in c}
779 # We really do not expect ret to be empty, since that would imply that
782 # We really do not expect ret to be empty, since that would imply that
780 # there's something in _dirs that didn't have a file in _fileset.
783 # there's something in _dirs that didn't have a file in _fileset.
781 assert ret
784 assert ret
782 return ret
785 return ret
783
786
784 def isexact(self):
787 def isexact(self):
785 return True
788 return True
786
789
787 @encoding.strmethod
790 @encoding.strmethod
788 def __repr__(self):
791 def __repr__(self):
789 return b'<exactmatcher files=%r>' % self._files
792 return b'<exactmatcher files=%r>' % self._files
790
793
791
794
792 class differencematcher(basematcher):
795 class differencematcher(basematcher):
793 '''Composes two matchers by matching if the first matches and the second
796 '''Composes two matchers by matching if the first matches and the second
794 does not.
797 does not.
795
798
796 The second matcher's non-matching-attributes (bad, traversedir) are ignored.
799 The second matcher's non-matching-attributes (bad, traversedir) are ignored.
797 '''
800 '''
798
801
799 def __init__(self, m1, m2):
802 def __init__(self, m1, m2):
800 super(differencematcher, self).__init__()
803 super(differencematcher, self).__init__()
801 self._m1 = m1
804 self._m1 = m1
802 self._m2 = m2
805 self._m2 = m2
803 self.bad = m1.bad
806 self.bad = m1.bad
804 self.traversedir = m1.traversedir
807 self.traversedir = m1.traversedir
805
808
806 def matchfn(self, f):
809 def matchfn(self, f):
807 return self._m1(f) and not self._m2(f)
810 return self._m1(f) and not self._m2(f)
808
811
809 @propertycache
812 @propertycache
810 def _files(self):
813 def _files(self):
811 if self.isexact():
814 if self.isexact():
812 return [f for f in self._m1.files() if self(f)]
815 return [f for f in self._m1.files() if self(f)]
813 # If m1 is not an exact matcher, we can't easily figure out the set of
816 # If m1 is not an exact matcher, we can't easily figure out the set of
814 # files, because its files() are not always files. For example, if
817 # files, because its files() are not always files. For example, if
815 # m1 is "path:dir" and m2 is "rootfilesin:.", we don't
818 # m1 is "path:dir" and m2 is "rootfilesin:.", we don't
816 # want to remove "dir" from the set even though it would match m2,
819 # want to remove "dir" from the set even though it would match m2,
817 # because the "dir" in m1 may not be a file.
820 # because the "dir" in m1 may not be a file.
818 return self._m1.files()
821 return self._m1.files()
819
822
820 def visitdir(self, dir):
823 def visitdir(self, dir):
821 if self._m2.visitdir(dir) == b'all':
824 if self._m2.visitdir(dir) == b'all':
822 return False
825 return False
823 elif not self._m2.visitdir(dir):
826 elif not self._m2.visitdir(dir):
824 # m2 does not match dir, we can return 'all' here if possible
827 # m2 does not match dir, we can return 'all' here if possible
825 return self._m1.visitdir(dir)
828 return self._m1.visitdir(dir)
826 return bool(self._m1.visitdir(dir))
829 return bool(self._m1.visitdir(dir))
827
830
828 def visitchildrenset(self, dir):
831 def visitchildrenset(self, dir):
829 m2_set = self._m2.visitchildrenset(dir)
832 m2_set = self._m2.visitchildrenset(dir)
830 if m2_set == b'all':
833 if m2_set == b'all':
831 return set()
834 return set()
832 m1_set = self._m1.visitchildrenset(dir)
835 m1_set = self._m1.visitchildrenset(dir)
833 # Possible values for m1: 'all', 'this', set(...), set()
836 # Possible values for m1: 'all', 'this', set(...), set()
834 # Possible values for m2: 'this', set(...), set()
837 # Possible values for m2: 'this', set(...), set()
835 # If m2 has nothing under here that we care about, return m1, even if
838 # If m2 has nothing under here that we care about, return m1, even if
836 # it's 'all'. This is a change in behavior from visitdir, which would
839 # it's 'all'. This is a change in behavior from visitdir, which would
837 # return True, not 'all', for some reason.
840 # return True, not 'all', for some reason.
838 if not m2_set:
841 if not m2_set:
839 return m1_set
842 return m1_set
840 if m1_set in [b'all', b'this']:
843 if m1_set in [b'all', b'this']:
841 # Never return 'all' here if m2_set is any kind of non-empty (either
844 # Never return 'all' here if m2_set is any kind of non-empty (either
842 # 'this' or set(foo)), since m2 might return set() for a
845 # 'this' or set(foo)), since m2 might return set() for a
843 # subdirectory.
846 # subdirectory.
844 return b'this'
847 return b'this'
845 # Possible values for m1: set(...), set()
848 # Possible values for m1: set(...), set()
846 # Possible values for m2: 'this', set(...)
849 # Possible values for m2: 'this', set(...)
847 # We ignore m2's set results. They're possibly incorrect:
850 # We ignore m2's set results. They're possibly incorrect:
848 # m1 = path:dir/subdir, m2=rootfilesin:dir, visitchildrenset(''):
851 # m1 = path:dir/subdir, m2=rootfilesin:dir, visitchildrenset(''):
849 # m1 returns {'dir'}, m2 returns {'dir'}, if we subtracted we'd
852 # m1 returns {'dir'}, m2 returns {'dir'}, if we subtracted we'd
850 # return set(), which is *not* correct, we still need to visit 'dir'!
853 # return set(), which is *not* correct, we still need to visit 'dir'!
851 return m1_set
854 return m1_set
852
855
853 def isexact(self):
856 def isexact(self):
854 return self._m1.isexact()
857 return self._m1.isexact()
855
858
856 @encoding.strmethod
859 @encoding.strmethod
857 def __repr__(self):
860 def __repr__(self):
858 return b'<differencematcher m1=%r, m2=%r>' % (self._m1, self._m2)
861 return b'<differencematcher m1=%r, m2=%r>' % (self._m1, self._m2)
859
862
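Illustration (not part of this changeset): difference composition over plain
predicates, a toy sketch of the matchfn above rather than the real classes:

# Match when m1 matches and m2 does not.
m1 = lambda f: f.endswith(b'.py')
m2 = lambda f: f.startswith(b'tests/')
diff = lambda f: m1(f) and not m2(f)

assert diff(b'mercurial/match.py')
assert not diff(b'tests/test-match.py')
assert not diff(b'README')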
860
863
861 def intersectmatchers(m1, m2):
864 def intersectmatchers(m1, m2):
862 '''Composes two matchers by matching if both of them match.
865 '''Composes two matchers by matching if both of them match.
863
866
864 The second matcher's non-matching-attributes (bad, traversedir) are ignored.
867 The second matcher's non-matching-attributes (bad, traversedir) are ignored.
865 '''
868 '''
866 if m1 is None or m2 is None:
869 if m1 is None or m2 is None:
867 return m1 or m2
870 return m1 or m2
868 if m1.always():
871 if m1.always():
869 m = copy.copy(m2)
872 m = copy.copy(m2)
870 # TODO: Consider encapsulating these things in a class so there's only
873 # TODO: Consider encapsulating these things in a class so there's only
871 # one thing to copy from m1.
874 # one thing to copy from m1.
872 m.bad = m1.bad
875 m.bad = m1.bad
873 m.traversedir = m1.traversedir
876 m.traversedir = m1.traversedir
874 return m
877 return m
875 if m2.always():
878 if m2.always():
876 m = copy.copy(m1)
879 m = copy.copy(m1)
877 return m
880 return m
878 return intersectionmatcher(m1, m2)
881 return intersectionmatcher(m1, m2)
879
882
880
883
881 class intersectionmatcher(basematcher):
884 class intersectionmatcher(basematcher):
882 def __init__(self, m1, m2):
885 def __init__(self, m1, m2):
883 super(intersectionmatcher, self).__init__()
886 super(intersectionmatcher, self).__init__()
884 self._m1 = m1
887 self._m1 = m1
885 self._m2 = m2
888 self._m2 = m2
886 self.bad = m1.bad
889 self.bad = m1.bad
887 self.traversedir = m1.traversedir
890 self.traversedir = m1.traversedir
888
891
889 @propertycache
892 @propertycache
890 def _files(self):
893 def _files(self):
891 if self.isexact():
894 if self.isexact():
892 m1, m2 = self._m1, self._m2
895 m1, m2 = self._m1, self._m2
893 if not m1.isexact():
896 if not m1.isexact():
894 m1, m2 = m2, m1
897 m1, m2 = m2, m1
895 return [f for f in m1.files() if m2(f)]
898 return [f for f in m1.files() if m2(f)]
896 # If neither m1 nor m2 is an exact matcher, we can't easily intersect
899 # If neither m1 nor m2 is an exact matcher, we can't easily intersect
897 # the set of files, because their files() are not always files. For
900 # the set of files, because their files() are not always files. For
898 # example, if intersecting a matcher "-I glob:foo.txt" with matcher of
901 # example, if intersecting a matcher "-I glob:foo.txt" with matcher of
899 # "path:dir2", we don't want to remove "dir2" from the set.
902 # "path:dir2", we don't want to remove "dir2" from the set.
900 return self._m1.files() + self._m2.files()
903 return self._m1.files() + self._m2.files()
901
904
902 def matchfn(self, f):
905 def matchfn(self, f):
903 return self._m1(f) and self._m2(f)
906 return self._m1(f) and self._m2(f)
904
907
905 def visitdir(self, dir):
908 def visitdir(self, dir):
906 visit1 = self._m1.visitdir(dir)
909 visit1 = self._m1.visitdir(dir)
907 if visit1 == b'all':
910 if visit1 == b'all':
908 return self._m2.visitdir(dir)
911 return self._m2.visitdir(dir)
909 # bool() because visit1=True + visit2='all' should not be 'all'
912 # bool() because visit1=True + visit2='all' should not be 'all'
910 return bool(visit1 and self._m2.visitdir(dir))
913 return bool(visit1 and self._m2.visitdir(dir))
911
914
912 def visitchildrenset(self, dir):
915 def visitchildrenset(self, dir):
913 m1_set = self._m1.visitchildrenset(dir)
916 m1_set = self._m1.visitchildrenset(dir)
914 if not m1_set:
917 if not m1_set:
915 return set()
918 return set()
916 m2_set = self._m2.visitchildrenset(dir)
919 m2_set = self._m2.visitchildrenset(dir)
917 if not m2_set:
920 if not m2_set:
918 return set()
921 return set()
919
922
920 if m1_set == b'all':
923 if m1_set == b'all':
921 return m2_set
924 return m2_set
922 elif m2_set == b'all':
925 elif m2_set == b'all':
923 return m1_set
926 return m1_set
924
927
925 if m1_set == b'this' or m2_set == b'this':
928 if m1_set == b'this' or m2_set == b'this':
926 return b'this'
929 return b'this'
927
930
928 assert isinstance(m1_set, set) and isinstance(m2_set, set)
931 assert isinstance(m1_set, set) and isinstance(m2_set, set)
929 return m1_set.intersection(m2_set)
932 return m1_set.intersection(m2_set)
930
933
931 def always(self):
934 def always(self):
932 return self._m1.always() and self._m2.always()
935 return self._m1.always() and self._m2.always()
933
936
934 def isexact(self):
937 def isexact(self):
935 return self._m1.isexact() or self._m2.isexact()
938 return self._m1.isexact() or self._m2.isexact()
936
939
937 @encoding.strmethod
940 @encoding.strmethod
938 def __repr__(self):
941 def __repr__(self):
939 return b'<intersectionmatcher m1=%r, m2=%r>' % (self._m1, self._m2)
942 return b'<intersectionmatcher m1=%r, m2=%r>' % (self._m1, self._m2)
940
943
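Illustration (not part of this changeset): the visitchildrenset() combination
rules above, restated as a standalone sketch over the sentinel values b'all'
and b'this' and concrete child sets (hypothetical helper name):

def intersect_childrenset(s1, s2):
    if not s1 or not s2:          # either side prunes everything
        return set()
    if s1 == b'all':
        return s2
    if s2 == b'all':
        return s1
    if s1 == b'this' or s2 == b'this':
        return b'this'
    return s1 & s2                # both concrete: set intersection

assert intersect_childrenset({b'a', b'b'}, {b'b', b'c'}) == {b'b'}
assert intersect_childrenset(b'all', {b'b'}) == {b'b'}
assert intersect_childrenset(b'this', {b'b'}) == b'this'
assert intersect_childrenset(set(), b'all') == set()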
941
944
942 class subdirmatcher(basematcher):
945 class subdirmatcher(basematcher):
943 """Adapt a matcher to work on a subdirectory only.
946 """Adapt a matcher to work on a subdirectory only.
944
947
945 The paths are remapped to remove/insert the path as needed:
948 The paths are remapped to remove/insert the path as needed:
946
949
947 >>> from . import pycompat
950 >>> from . import pycompat
948 >>> m1 = match(util.localpath(b'/root'), b'', [b'a.txt', b'sub/b.txt'], auditor=lambda name: None)
951 >>> m1 = match(util.localpath(b'/root'), b'', [b'a.txt', b'sub/b.txt'], auditor=lambda name: None)
949 >>> m2 = subdirmatcher(b'sub', m1)
952 >>> m2 = subdirmatcher(b'sub', m1)
950 >>> m2(b'a.txt')
953 >>> m2(b'a.txt')
951 False
954 False
952 >>> m2(b'b.txt')
955 >>> m2(b'b.txt')
953 True
956 True
954 >>> m2.matchfn(b'a.txt')
957 >>> m2.matchfn(b'a.txt')
955 False
958 False
956 >>> m2.matchfn(b'b.txt')
959 >>> m2.matchfn(b'b.txt')
957 True
960 True
958 >>> m2.files()
961 >>> m2.files()
959 ['b.txt']
962 ['b.txt']
960 >>> m2.exact(b'b.txt')
963 >>> m2.exact(b'b.txt')
961 True
964 True
962 >>> def bad(f, msg):
965 >>> def bad(f, msg):
963 ... print(pycompat.sysstr(b"%s: %s" % (f, msg)))
966 ... print(pycompat.sysstr(b"%s: %s" % (f, msg)))
964 >>> m1.bad = bad
967 >>> m1.bad = bad
965 >>> m2.bad(b'x.txt', b'No such file')
968 >>> m2.bad(b'x.txt', b'No such file')
966 sub/x.txt: No such file
969 sub/x.txt: No such file
967 """
970 """
968
971
969 def __init__(self, path, matcher):
972 def __init__(self, path, matcher):
970 super(subdirmatcher, self).__init__()
973 super(subdirmatcher, self).__init__()
971 self._path = path
974 self._path = path
972 self._matcher = matcher
975 self._matcher = matcher
973 self._always = matcher.always()
976 self._always = matcher.always()
974
977
975 self._files = [
978 self._files = [
976 f[len(path) + 1 :]
979 f[len(path) + 1 :]
977 for f in matcher._files
980 for f in matcher._files
978 if f.startswith(path + b"/")
981 if f.startswith(path + b"/")
979 ]
982 ]
980
983
981 # If the parent repo had a path to this subrepo and the matcher is
984 # If the parent repo had a path to this subrepo and the matcher is
982 # a prefix matcher, this submatcher always matches.
985 # a prefix matcher, this submatcher always matches.
983 if matcher.prefix():
986 if matcher.prefix():
984 self._always = any(f == path for f in matcher._files)
987 self._always = any(f == path for f in matcher._files)
985
988
986 def bad(self, f, msg):
989 def bad(self, f, msg):
987 self._matcher.bad(self._path + b"/" + f, msg)
990 self._matcher.bad(self._path + b"/" + f, msg)
988
991
989 def matchfn(self, f):
992 def matchfn(self, f):
990 # Some information is lost in the superclass's constructor, so we
993 # Some information is lost in the superclass's constructor, so we
991 # can not accurately create the matching function for the subdirectory
994 # can not accurately create the matching function for the subdirectory
992 # from the inputs. Instead, we override matchfn() and visitdir() to
995 # from the inputs. Instead, we override matchfn() and visitdir() to
993 # call the original matcher with the subdirectory path prepended.
996 # call the original matcher with the subdirectory path prepended.
994 return self._matcher.matchfn(self._path + b"/" + f)
997 return self._matcher.matchfn(self._path + b"/" + f)
995
998
996 def visitdir(self, dir):
999 def visitdir(self, dir):
997 if dir == b'':
1000 if dir == b'':
998 dir = self._path
1001 dir = self._path
999 else:
1002 else:
1000 dir = self._path + b"/" + dir
1003 dir = self._path + b"/" + dir
1001 return self._matcher.visitdir(dir)
1004 return self._matcher.visitdir(dir)
1002
1005
1003 def visitchildrenset(self, dir):
1006 def visitchildrenset(self, dir):
1004 if dir == b'':
1007 if dir == b'':
1005 dir = self._path
1008 dir = self._path
1006 else:
1009 else:
1007 dir = self._path + b"/" + dir
1010 dir = self._path + b"/" + dir
1008 return self._matcher.visitchildrenset(dir)
1011 return self._matcher.visitchildrenset(dir)
1009
1012
1010 def always(self):
1013 def always(self):
1011 return self._always
1014 return self._always
1012
1015
1013 def prefix(self):
1016 def prefix(self):
1014 return self._matcher.prefix() and not self._always
1017 return self._matcher.prefix() and not self._always
1015
1018
1016 @encoding.strmethod
1019 @encoding.strmethod
1017 def __repr__(self):
1020 def __repr__(self):
1018 return b'<subdirmatcher path=%r, matcher=%r>' % (
1021 return b'<subdirmatcher path=%r, matcher=%r>' % (
1019 self._path,
1022 self._path,
1020 self._matcher,
1023 self._matcher,
1021 )
1024 )
1022
1025
1023
1026
1024 class prefixdirmatcher(basematcher):
1027 class prefixdirmatcher(basematcher):
1025 """Adapt a matcher to work on a parent directory.
1028 """Adapt a matcher to work on a parent directory.
1026
1029
1027 The matcher's non-matching-attributes (bad, traversedir) are ignored.
1030 The matcher's non-matching-attributes (bad, traversedir) are ignored.
1028
1031
1029 The prefix path should usually be the relative path from the root of
1032 The prefix path should usually be the relative path from the root of
1030 this matcher to the root of the wrapped matcher.
1033 this matcher to the root of the wrapped matcher.
1031
1034
1032 >>> m1 = match(util.localpath(b'/root/d/e'), b'f', [b'../a.txt', b'b.txt'], auditor=lambda name: None)
1035 >>> m1 = match(util.localpath(b'/root/d/e'), b'f', [b'../a.txt', b'b.txt'], auditor=lambda name: None)
1033 >>> m2 = prefixdirmatcher(b'd/e', m1)
1036 >>> m2 = prefixdirmatcher(b'd/e', m1)
1034 >>> m2(b'a.txt')
1037 >>> m2(b'a.txt')
1035 False
1038 False
1036 >>> m2(b'd/e/a.txt')
1039 >>> m2(b'd/e/a.txt')
1037 True
1040 True
1038 >>> m2(b'd/e/b.txt')
1041 >>> m2(b'd/e/b.txt')
1039 False
1042 False
1040 >>> m2.files()
1043 >>> m2.files()
1041 ['d/e/a.txt', 'd/e/f/b.txt']
1044 ['d/e/a.txt', 'd/e/f/b.txt']
1042 >>> m2.exact(b'd/e/a.txt')
1045 >>> m2.exact(b'd/e/a.txt')
1043 True
1046 True
1044 >>> m2.visitdir(b'd')
1047 >>> m2.visitdir(b'd')
1045 True
1048 True
1046 >>> m2.visitdir(b'd/e')
1049 >>> m2.visitdir(b'd/e')
1047 True
1050 True
1048 >>> m2.visitdir(b'd/e/f')
1051 >>> m2.visitdir(b'd/e/f')
1049 True
1052 True
1050 >>> m2.visitdir(b'd/e/g')
1053 >>> m2.visitdir(b'd/e/g')
1051 False
1054 False
1052 >>> m2.visitdir(b'd/ef')
1055 >>> m2.visitdir(b'd/ef')
1053 False
1056 False
1054 """
1057 """
1055
1058
1056 def __init__(self, path, matcher, badfn=None):
1059 def __init__(self, path, matcher, badfn=None):
1057 super(prefixdirmatcher, self).__init__(badfn)
1060 super(prefixdirmatcher, self).__init__(badfn)
1058 if not path:
1061 if not path:
1059 raise error.ProgrammingError(b'prefix path must not be empty')
1062 raise error.ProgrammingError(b'prefix path must not be empty')
1060 self._path = path
1063 self._path = path
1061 self._pathprefix = path + b'/'
1064 self._pathprefix = path + b'/'
1062 self._matcher = matcher
1065 self._matcher = matcher
1063
1066
1064 @propertycache
1067 @propertycache
1065 def _files(self):
1068 def _files(self):
1066 return [self._pathprefix + f for f in self._matcher._files]
1069 return [self._pathprefix + f for f in self._matcher._files]
1067
1070
1068 def matchfn(self, f):
1071 def matchfn(self, f):
1069 if not f.startswith(self._pathprefix):
1072 if not f.startswith(self._pathprefix):
1070 return False
1073 return False
1071 return self._matcher.matchfn(f[len(self._pathprefix) :])
1074 return self._matcher.matchfn(f[len(self._pathprefix) :])
1072
1075
1073 @propertycache
1076 @propertycache
1074 def _pathdirs(self):
1077 def _pathdirs(self):
1075 return set(pathutil.finddirs(self._path))
1078 return set(pathutil.finddirs(self._path))
1076
1079
1077 def visitdir(self, dir):
1080 def visitdir(self, dir):
1078 if dir == self._path:
1081 if dir == self._path:
1079 return self._matcher.visitdir(b'')
1082 return self._matcher.visitdir(b'')
1080 if dir.startswith(self._pathprefix):
1083 if dir.startswith(self._pathprefix):
1081 return self._matcher.visitdir(dir[len(self._pathprefix) :])
1084 return self._matcher.visitdir(dir[len(self._pathprefix) :])
1082 return dir in self._pathdirs
1085 return dir in self._pathdirs
1083
1086
1084 def visitchildrenset(self, dir):
1087 def visitchildrenset(self, dir):
1085 if dir == self._path:
1088 if dir == self._path:
1086 return self._matcher.visitchildrenset(b'')
1089 return self._matcher.visitchildrenset(b'')
1087 if dir.startswith(self._pathprefix):
1090 if dir.startswith(self._pathprefix):
1088 return self._matcher.visitchildrenset(dir[len(self._pathprefix) :])
1091 return self._matcher.visitchildrenset(dir[len(self._pathprefix) :])
1089 if dir in self._pathdirs:
1092 if dir in self._pathdirs:
1090 return b'this'
1093 return b'this'
1091 return set()
1094 return set()
1092
1095
1093 def isexact(self):
1096 def isexact(self):
1094 return self._matcher.isexact()
1097 return self._matcher.isexact()
1095
1098
1096 def prefix(self):
1099 def prefix(self):
1097 return self._matcher.prefix()
1100 return self._matcher.prefix()
1098
1101
1099 @encoding.strmethod
1102 @encoding.strmethod
1100 def __repr__(self):
1103 def __repr__(self):
1101 return b'<prefixdirmatcher path=%r, matcher=%r>' % (
1104 return b'<prefixdirmatcher path=%r, matcher=%r>' % (
1102 pycompat.bytestr(self._path),
1105 pycompat.bytestr(self._path),
1103 self._matcher,
1106 self._matcher,
1104 )
1107 )
1105
1108
1106
1109
1107 class unionmatcher(basematcher):
1110 class unionmatcher(basematcher):
1108 """A matcher that is the union of several matchers.
1111 """A matcher that is the union of several matchers.
1109
1112
1110 The non-matching-attributes (bad, traversedir) are taken from the first
1113 The non-matching-attributes (bad, traversedir) are taken from the first
1111 matcher.
1114 matcher.
1112 """
1115 """
1113
1116
1114 def __init__(self, matchers):
1117 def __init__(self, matchers):
1115 m1 = matchers[0]
1118 m1 = matchers[0]
1116 super(unionmatcher, self).__init__()
1119 super(unionmatcher, self).__init__()
1117 self.traversedir = m1.traversedir
1120 self.traversedir = m1.traversedir
1118 self._matchers = matchers
1121 self._matchers = matchers
1119
1122
1120 def matchfn(self, f):
1123 def matchfn(self, f):
1121 for match in self._matchers:
1124 for match in self._matchers:
1122 if match(f):
1125 if match(f):
1123 return True
1126 return True
1124 return False
1127 return False
1125
1128
1126 def visitdir(self, dir):
1129 def visitdir(self, dir):
1127 r = False
1130 r = False
1128 for m in self._matchers:
1131 for m in self._matchers:
1129 v = m.visitdir(dir)
1132 v = m.visitdir(dir)
1130 if v == b'all':
1133 if v == b'all':
1131 return v
1134 return v
1132 r |= v
1135 r |= v
1133 return r
1136 return r
1134
1137
1135 def visitchildrenset(self, dir):
1138 def visitchildrenset(self, dir):
1136 r = set()
1139 r = set()
1137 this = False
1140 this = False
1138 for m in self._matchers:
1141 for m in self._matchers:
1139 v = m.visitchildrenset(dir)
1142 v = m.visitchildrenset(dir)
1140 if not v:
1143 if not v:
1141 continue
1144 continue
1142 if v == b'all':
1145 if v == b'all':
1143 return v
1146 return v
1144 if this or v == b'this':
1147 if this or v == b'this':
1145 this = True
1148 this = True
1146 # don't break, we might have an 'all' in here.
1149 # don't break, we might have an 'all' in here.
1147 continue
1150 continue
1148 assert isinstance(v, set)
1151 assert isinstance(v, set)
1149 r = r.union(v)
1152 r = r.union(v)
1150 if this:
1153 if this:
1151 return b'this'
1154 return b'this'
1152 return r
1155 return r
1153
1156
1154 @encoding.strmethod
1157 @encoding.strmethod
1155 def __repr__(self):
1158 def __repr__(self):
1156 return b'<unionmatcher matchers=%r>' % self._matchers
1159 return b'<unionmatcher matchers=%r>' % self._matchers
1157
1160
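Illustration (not part of this changeset): the union combination above as a
standalone sketch. b'all' wins outright, b'this' subsumes any concrete sets,
and otherwise child sets accumulate (hypothetical helper name):

def union_childrenset(values):
    r = set()
    this = False
    for v in values:
        if not v:
            continue
        if v == b'all':
            return v
        if this or v == b'this':
            this = True       # keep scanning: a later b'all' still wins
            continue
        r |= v
    return b'this' if this else r

assert union_childrenset([{b'a'}, {b'b'}]) == {b'a', b'b'}
assert union_childrenset([{b'a'}, b'this']) == b'this'
assert union_childrenset([b'this', b'all']) == b'all'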
1158
1161
1159 def patkind(pattern, default=None):
1162 def patkind(pattern, default=None):
1160 r'''If pattern is 'kind:pat' with a known kind, return kind.
1163 r'''If pattern is 'kind:pat' with a known kind, return kind.
1161
1164
1162 >>> patkind(br're:.*\.c$')
1165 >>> patkind(br're:.*\.c$')
1163 're'
1166 're'
1164 >>> patkind(b'glob:*.c')
1167 >>> patkind(b'glob:*.c')
1165 'glob'
1168 'glob'
1166 >>> patkind(b'relpath:test.py')
1169 >>> patkind(b'relpath:test.py')
1167 'relpath'
1170 'relpath'
1168 >>> patkind(b'main.py')
1171 >>> patkind(b'main.py')
1169 >>> patkind(b'main.py', default=b're')
1172 >>> patkind(b'main.py', default=b're')
1170 're'
1173 're'
1171 '''
1174 '''
1172 return _patsplit(pattern, default)[0]
1175 return _patsplit(pattern, default)[0]
1173
1176
1174
1177
1175 def _patsplit(pattern, default):
1178 def _patsplit(pattern, default):
1176 """Split a string into the optional pattern kind prefix and the actual
1179 """Split a string into the optional pattern kind prefix and the actual
1177 pattern."""
1180 pattern."""
1178 if b':' in pattern:
1181 if b':' in pattern:
1179 kind, pat = pattern.split(b':', 1)
1182 kind, pat = pattern.split(b':', 1)
1180 if kind in allpatternkinds:
1183 if kind in allpatternkinds:
1181 return kind, pat
1184 return kind, pat
1182 return default, pattern
1185 return default, pattern
1183
1186
1184
1187
1185 def _globre(pat):
1188 def _globre(pat):
1186 r'''Convert an extended glob string to a regexp string.
1189 r'''Convert an extended glob string to a regexp string.
1187
1190
1188 >>> from . import pycompat
1191 >>> from . import pycompat
1189 >>> def bprint(s):
1192 >>> def bprint(s):
1190 ... print(pycompat.sysstr(s))
1193 ... print(pycompat.sysstr(s))
1191 >>> bprint(_globre(br'?'))
1194 >>> bprint(_globre(br'?'))
1192 .
1195 .
1193 >>> bprint(_globre(br'*'))
1196 >>> bprint(_globre(br'*'))
1194 [^/]*
1197 [^/]*
1195 >>> bprint(_globre(br'**'))
1198 >>> bprint(_globre(br'**'))
1196 .*
1199 .*
1197 >>> bprint(_globre(br'**/a'))
1200 >>> bprint(_globre(br'**/a'))
1198 (?:.*/)?a
1201 (?:.*/)?a
1199 >>> bprint(_globre(br'a/**/b'))
1202 >>> bprint(_globre(br'a/**/b'))
1200 a/(?:.*/)?b
1203 a/(?:.*/)?b
1201 >>> bprint(_globre(br'[a*?!^][^b][!c]'))
1204 >>> bprint(_globre(br'[a*?!^][^b][!c]'))
1202 [a*?!^][\^b][^c]
1205 [a*?!^][\^b][^c]
1203 >>> bprint(_globre(br'{a,b}'))
1206 >>> bprint(_globre(br'{a,b}'))
1204 (?:a|b)
1207 (?:a|b)
1205 >>> bprint(_globre(br'.\*\?'))
1208 >>> bprint(_globre(br'.\*\?'))
1206 \.\*\?
1209 \.\*\?
1207 '''
1210 '''
1208 i, n = 0, len(pat)
1211 i, n = 0, len(pat)
1209 res = b''
1212 res = b''
1210 group = 0
1213 group = 0
1211 escape = util.stringutil.regexbytesescapemap.get
1214 escape = util.stringutil.regexbytesescapemap.get
1212
1215
1213 def peek():
1216 def peek():
1214 return i < n and pat[i : i + 1]
1217 return i < n and pat[i : i + 1]
1215
1218
1216 while i < n:
1219 while i < n:
1217 c = pat[i : i + 1]
1220 c = pat[i : i + 1]
1218 i += 1
1221 i += 1
1219 if c not in b'*?[{},\\':
1222 if c not in b'*?[{},\\':
1220 res += escape(c, c)
1223 res += escape(c, c)
1221 elif c == b'*':
1224 elif c == b'*':
1222 if peek() == b'*':
1225 if peek() == b'*':
1223 i += 1
1226 i += 1
1224 if peek() == b'/':
1227 if peek() == b'/':
1225 i += 1
1228 i += 1
1226 res += b'(?:.*/)?'
1229 res += b'(?:.*/)?'
1227 else:
1230 else:
1228 res += b'.*'
1231 res += b'.*'
1229 else:
1232 else:
1230 res += b'[^/]*'
1233 res += b'[^/]*'
1231 elif c == b'?':
1234 elif c == b'?':
1232 res += b'.'
1235 res += b'.'
1233 elif c == b'[':
1236 elif c == b'[':
1234 j = i
1237 j = i
1235 if j < n and pat[j : j + 1] in b'!]':
1238 if j < n and pat[j : j + 1] in b'!]':
1236 j += 1
1239 j += 1
1237 while j < n and pat[j : j + 1] != b']':
1240 while j < n and pat[j : j + 1] != b']':
1238 j += 1
1241 j += 1
1239 if j >= n:
1242 if j >= n:
1240 res += b'\\['
1243 res += b'\\['
1241 else:
1244 else:
1242 stuff = pat[i:j].replace(b'\\', b'\\\\')
1245 stuff = pat[i:j].replace(b'\\', b'\\\\')
1243 i = j + 1
1246 i = j + 1
1244 if stuff[0:1] == b'!':
1247 if stuff[0:1] == b'!':
1245 stuff = b'^' + stuff[1:]
1248 stuff = b'^' + stuff[1:]
1246 elif stuff[0:1] == b'^':
1249 elif stuff[0:1] == b'^':
1247 stuff = b'\\' + stuff
1250 stuff = b'\\' + stuff
1248 res = b'%s[%s]' % (res, stuff)
1251 res = b'%s[%s]' % (res, stuff)
1249 elif c == b'{':
1252 elif c == b'{':
1250 group += 1
1253 group += 1
1251 res += b'(?:'
1254 res += b'(?:'
1252 elif c == b'}' and group:
1255 elif c == b'}' and group:
1253 res += b')'
1256 res += b')'
1254 group -= 1
1257 group -= 1
1255 elif c == b',' and group:
1258 elif c == b',' and group:
1256 res += b'|'
1259 res += b'|'
1257 elif c == b'\\':
1260 elif c == b'\\':
1258 p = peek()
1261 p = peek()
1259 if p:
1262 if p:
1260 i += 1
1263 i += 1
1261 res += escape(p, p)
1264 res += escape(p, p)
1262 else:
1265 else:
1263 res += escape(c, c)
1266 res += escape(c, c)
1264 else:
1267 else:
1265 res += escape(c, c)
1268 res += escape(c, c)
1266 return res
1269 return res
1267
1270
1268
1271
1269 def _regex(kind, pat, globsuffix):
1272 def _regex(kind, pat, globsuffix):
1270 '''Convert a (normalized) pattern of any kind into a
1273 '''Convert a (normalized) pattern of any kind into a
1271 regular expression.
1274 regular expression.
1272 globsuffix is appended to the regexp of globs.'''
1275 globsuffix is appended to the regexp of globs.'''
1273
1276
1274 if rustmod is not None:
1277 if rustmod is not None:
1275 try:
1278 try:
1276 return rustmod.build_single_regex(kind, pat, globsuffix)
1279 return rustmod.build_single_regex(kind, pat, globsuffix)
1277 except rustmod.PatternError:
1280 except rustmod.PatternError:
1278 raise error.ProgrammingError(
1281 raise error.ProgrammingError(
1279 b'not a regex pattern: %s:%s' % (kind, pat)
1282 b'not a regex pattern: %s:%s' % (kind, pat)
1280 )
1283 )
1281
1284
1282 if not pat and kind in (b'glob', b'relpath'):
1285 if not pat and kind in (b'glob', b'relpath'):
1283 return b''
1286 return b''
1284 if kind == b're':
1287 if kind == b're':
1285 return pat
1288 return pat
1286 if kind in (b'path', b'relpath'):
1289 if kind in (b'path', b'relpath'):
1287 if pat == b'.':
1290 if pat == b'.':
1288 return b''
1291 return b''
1289 return util.stringutil.reescape(pat) + b'(?:/|$)'
1292 return util.stringutil.reescape(pat) + b'(?:/|$)'
1290 if kind == b'rootfilesin':
1293 if kind == b'rootfilesin':
1291 if pat == b'.':
1294 if pat == b'.':
1292 escaped = b''
1295 escaped = b''
1293 else:
1296 else:
1294 # Pattern is a directory name.
1297 # Pattern is a directory name.
1295 escaped = util.stringutil.reescape(pat) + b'/'
1298 escaped = util.stringutil.reescape(pat) + b'/'
1296 # Anything after the pattern must be a non-directory.
1299 # Anything after the pattern must be a non-directory.
1297 return escaped + b'[^/]+$'
1300 return escaped + b'[^/]+$'
1298 if kind == b'relglob':
1301 if kind == b'relglob':
1299 globre = _globre(pat)
1302 globre = _globre(pat)
1300 if globre.startswith(b'[^/]*'):
1303 if globre.startswith(b'[^/]*'):
1301 # When pat has the form *XYZ (common), make the returned regex more
1304 # When pat has the form *XYZ (common), make the returned regex more
1302 # legible by returning the regex for **XYZ instead of **/*XYZ.
1305 # legible by returning the regex for **XYZ instead of **/*XYZ.
1303 return b'.*' + globre[len(b'[^/]*') :] + globsuffix
1306 return b'.*' + globre[len(b'[^/]*') :] + globsuffix
1304 return b'(?:|.*/)' + globre + globsuffix
1307 return b'(?:|.*/)' + globre + globsuffix
1305 if kind == b'relre':
1308 if kind == b'relre':
1306 if pat.startswith(b'^'):
1309 if pat.startswith(b'^'):
1307 return pat
1310 return pat
1308 return b'.*' + pat
1311 return b'.*' + pat
1309 if kind in (b'glob', b'rootglob'):
1312 if kind in (b'glob', b'rootglob'):
1310 return _globre(pat) + globsuffix
1313 return _globre(pat) + globsuffix
1311 raise error.ProgrammingError(b'not a regex pattern: %s:%s' % (kind, pat))
1314 raise error.ProgrammingError(b'not a regex pattern: %s:%s' % (kind, pat))
1312
1315
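For orientation, the translations performed above look roughly as follows, a
hand-checked sketch using globsuffix = b'(?:/|$)' as includematcher passes it,
not output captured from _regex() itself:

import re

examples = {
    (b'path', b'dir'): br'dir(?:/|$)',         # dir and everything below it
    (b'rootfilesin', b'dir'): br'dir/[^/]+$',  # only files directly in dir
    (b'glob', b'*.c'): br'[^/]*\.c(?:/|$)',    # rooted glob
    (b'relglob', b'*.c'): br'.*\.c(?:/|$)',    # glob anchored anywhere
    (b'relre', b'foo'): br'.*foo',             # unanchored regexp
}
assert re.match(examples[(b'path', b'dir')], b'dir/a.txt')
assert not re.match(examples[(b'rootfilesin', b'dir')], b'dir/sub/a.txt')
assert re.match(examples[(b'relglob', b'*.c')], b'src/main.c')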
1313
1316
1314 def _buildmatch(kindpats, globsuffix, root):
1317 def _buildmatch(kindpats, globsuffix, root):
1315 '''Return regexp string and a matcher function for kindpats.
1318 '''Return regexp string and a matcher function for kindpats.
1316 globsuffix is appended to the regexp of globs.'''
1319 globsuffix is appended to the regexp of globs.'''
1317 matchfuncs = []
1320 matchfuncs = []
1318
1321
1319 subincludes, kindpats = _expandsubinclude(kindpats, root)
1322 subincludes, kindpats = _expandsubinclude(kindpats, root)
1320 if subincludes:
1323 if subincludes:
1321 submatchers = {}
1324 submatchers = {}
1322
1325
1323 def matchsubinclude(f):
1326 def matchsubinclude(f):
1324 for prefix, matcherargs in subincludes:
1327 for prefix, matcherargs in subincludes:
1325 if f.startswith(prefix):
1328 if f.startswith(prefix):
1326 mf = submatchers.get(prefix)
1329 mf = submatchers.get(prefix)
1327 if mf is None:
1330 if mf is None:
1328 mf = match(*matcherargs)
1331 mf = match(*matcherargs)
1329 submatchers[prefix] = mf
1332 submatchers[prefix] = mf
1330
1333
1331 if mf(f[len(prefix) :]):
1334 if mf(f[len(prefix) :]):
1332 return True
1335 return True
1333 return False
1336 return False
1334
1337
1335 matchfuncs.append(matchsubinclude)
1338 matchfuncs.append(matchsubinclude)
1336
1339
1337 regex = b''
1340 regex = b''
1338 if kindpats:
1341 if kindpats:
1339 if all(k == b'rootfilesin' for k, p, s in kindpats):
1342 if all(k == b'rootfilesin' for k, p, s in kindpats):
1340 dirs = {p for k, p, s in kindpats}
1343 dirs = {p for k, p, s in kindpats}
1341
1344
1342 def mf(f):
1345 def mf(f):
1343 i = f.rfind(b'/')
1346 i = f.rfind(b'/')
1344 if i >= 0:
1347 if i >= 0:
1345 dir = f[:i]
1348 dir = f[:i]
1346 else:
1349 else:
1347 dir = b'.'
1350 dir = b'.'
1348 return dir in dirs
1351 return dir in dirs
1349
1352
1350 regex = b'rootfilesin: %s' % stringutil.pprint(list(sorted(dirs)))
1353 regex = b'rootfilesin: %s' % stringutil.pprint(list(sorted(dirs)))
1351 matchfuncs.append(mf)
1354 matchfuncs.append(mf)
1352 else:
1355 else:
1353 regex, mf = _buildregexmatch(kindpats, globsuffix)
1356 regex, mf = _buildregexmatch(kindpats, globsuffix)
1354 matchfuncs.append(mf)
1357 matchfuncs.append(mf)
1355
1358
1356 if len(matchfuncs) == 1:
1359 if len(matchfuncs) == 1:
1357 return regex, matchfuncs[0]
1360 return regex, matchfuncs[0]
1358 else:
1361 else:
1359 return regex, lambda f: any(mf(f) for mf in matchfuncs)
1362 return regex, lambda f: any(mf(f) for mf in matchfuncs)
1360
1363
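Illustration (not part of this changeset): the rootfilesin fast path above
reduces matching to a set lookup on the file's directory. A standalone sketch
with hypothetical names:

dirs = {b'.', b'src'}   # directories named by rootfilesin patterns

def rootfilesin_match(f):
    i = f.rfind(b'/')
    dir = f[:i] if i >= 0 else b'.'
    return dir in dirs

assert rootfilesin_match(b'setup.py')         # directory b'.'
assert rootfilesin_match(b'src/main.c')       # directory b'src'
assert not rootfilesin_match(b'src/sub/x.c')  # directory b'src/sub'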
1361
1364
1362 MAX_RE_SIZE = 20000
1365 MAX_RE_SIZE = 20000
1363
1366
1364
1367
1365 def _joinregexes(regexps):
1368 def _joinregexes(regexps):
1366 """gather multiple regular expressions into a single one"""
1369 """gather multiple regular expressions into a single one"""
1367 return b'|'.join(regexps)
1370 return b'|'.join(regexps)
1368
1371
1369
1372
1370 def _buildregexmatch(kindpats, globsuffix):
1373 def _buildregexmatch(kindpats, globsuffix):
1371 """Build a match function from a list of kinds and kindpats,
1374 """Build a match function from a list of kinds and kindpats,
1372 return regexp string and a matcher function.
1375 return regexp string and a matcher function.
1373
1376
1374 Test too large input
1377 Test too large input
1375 >>> _buildregexmatch([
1378 >>> _buildregexmatch([
1376 ... (b'relglob', b'?' * MAX_RE_SIZE, b'')
1379 ... (b'relglob', b'?' * MAX_RE_SIZE, b'')
1377 ... ], b'$')
1380 ... ], b'$')
1378 Traceback (most recent call last):
1381 Traceback (most recent call last):
1379 ...
1382 ...
1380 Abort: matcher pattern is too long (20009 bytes)
1383 Abort: matcher pattern is too long (20009 bytes)
1381 """
1384 """
1382 try:
1385 try:
1383 allgroups = []
1386 allgroups = []
1384 regexps = [_regex(k, p, globsuffix) for (k, p, s) in kindpats]
1387 regexps = [_regex(k, p, globsuffix) for (k, p, s) in kindpats]
1385 fullregexp = _joinregexes(regexps)
1388 fullregexp = _joinregexes(regexps)
1386
1389
1387 startidx = 0
1390 startidx = 0
1388 groupsize = 0
1391 groupsize = 0
1389 for idx, r in enumerate(regexps):
1392 for idx, r in enumerate(regexps):
1390 piecesize = len(r)
1393 piecesize = len(r)
1391 if piecesize > MAX_RE_SIZE:
1394 if piecesize > MAX_RE_SIZE:
1392 msg = _(b"matcher pattern is too long (%d bytes)") % piecesize
1395 msg = _(b"matcher pattern is too long (%d bytes)") % piecesize
1393 raise error.Abort(msg)
1396 raise error.Abort(msg)
1394 elif (groupsize + piecesize) > MAX_RE_SIZE:
1397 elif (groupsize + piecesize) > MAX_RE_SIZE:
1395 group = regexps[startidx:idx]
1398 group = regexps[startidx:idx]
1396 allgroups.append(_joinregexes(group))
1399 allgroups.append(_joinregexes(group))
1397 startidx = idx
1400 startidx = idx
1398 groupsize = 0
1401 groupsize = 0
1399 groupsize += piecesize + 1
1402 groupsize += piecesize + 1
1400
1403
1401 if startidx == 0:
1404 if startidx == 0:
1402 matcher = _rematcher(fullregexp)
1405 matcher = _rematcher(fullregexp)
1403 func = lambda s: bool(matcher(s))
1406 func = lambda s: bool(matcher(s))
1404 else:
1407 else:
1405 group = regexps[startidx:]
1408 group = regexps[startidx:]
1406 allgroups.append(_joinregexes(group))
1409 allgroups.append(_joinregexes(group))
1407 allmatchers = [_rematcher(g) for g in allgroups]
1410 allmatchers = [_rematcher(g) for g in allgroups]
1408 func = lambda s: any(m(s) for m in allmatchers)
1411 func = lambda s: any(m(s) for m in allmatchers)
1409 return fullregexp, func
1412 return fullregexp, func
1410 except re.error:
1413 except re.error:
1411 for k, p, s in kindpats:
1414 for k, p, s in kindpats:
1412 try:
1415 try:
1413 _rematcher(_regex(k, p, globsuffix))
1416 _rematcher(_regex(k, p, globsuffix))
1414 except re.error:
1417 except re.error:
1415 if s:
1418 if s:
1416 raise error.Abort(
1419 raise error.Abort(
1417 _(b"%s: invalid pattern (%s): %s") % (s, k, p)
1420 _(b"%s: invalid pattern (%s): %s") % (s, k, p)
1418 )
1421 )
1419 else:
1422 else:
1420 raise error.Abort(_(b"invalid pattern (%s): %s") % (k, p))
1423 raise error.Abort(_(b"invalid pattern (%s): %s") % (k, p))
1421 raise error.Abort(_(b"invalid pattern"))
1424 raise error.Abort(_(b"invalid pattern"))
1422
1425
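Illustration (not part of this changeset): the grouping strategy above packs
regexp pieces greedily into chunks of at most MAX_RE_SIZE bytes, each chunk
compiled separately and OR-ed together at match time. A simplified sketch
(hypothetical helper; the real code additionally aborts when a single piece
is itself oversized):

def group_regexps(regexps, max_size):
    groups, start, size = [], 0, 0
    for idx, r in enumerate(regexps):
        if size + len(r) > max_size:
            groups.append(b'|'.join(regexps[start:idx]))
            start, size = idx, 0
        size += len(r) + 1    # +1 for the b'|' separator
    groups.append(b'|'.join(regexps[start:]))
    return groups

assert group_regexps([b'aaaa', b'bbbb', b'cccc'], 10) == [b'aaaa|bbbb', b'cccc']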
1423
1426
1424 def _patternrootsanddirs(kindpats):
1427 def _patternrootsanddirs(kindpats):
1425 '''Returns roots and directories corresponding to each pattern.
1428 '''Returns roots and directories corresponding to each pattern.
1426
1429
1427 This calculates the roots and directories exactly matching the patterns and
1430 This calculates the roots and directories exactly matching the patterns and
1428 returns a tuple of (roots, dirs) for each. It does not return other
1431 returns a tuple of (roots, dirs) for each. It does not return other
1429 directories which may also need to be considered, like the parent
1432 directories which may also need to be considered, like the parent
1430 directories.
1433 directories.
1431 '''
1434 '''
1432 r = []
1435 r = []
1433 d = []
1436 d = []
1434 for kind, pat, source in kindpats:
1437 for kind, pat, source in kindpats:
1435 if kind in (b'glob', b'rootglob'): # find the non-glob prefix
1438 if kind in (b'glob', b'rootglob'): # find the non-glob prefix
1436 root = []
1439 root = []
1437 for p in pat.split(b'/'):
1440 for p in pat.split(b'/'):
1438 if b'[' in p or b'{' in p or b'*' in p or b'?' in p:
1441 if b'[' in p or b'{' in p or b'*' in p or b'?' in p:
1439 break
1442 break
1440 root.append(p)
1443 root.append(p)
1441 r.append(b'/'.join(root))
1444 r.append(b'/'.join(root))
1442 elif kind in (b'relpath', b'path'):
1445 elif kind in (b'relpath', b'path'):
1443 if pat == b'.':
1446 if pat == b'.':
1444 pat = b''
1447 pat = b''
1445 r.append(pat)
1448 r.append(pat)
1446 elif kind in (b'rootfilesin',):
1449 elif kind in (b'rootfilesin',):
1447 if pat == b'.':
1450 if pat == b'.':
1448 pat = b''
1451 pat = b''
1449 d.append(pat)
1452 d.append(pat)
1450 else: # relglob, re, relre
1453 else: # relglob, re, relre
1451 r.append(b'')
1454 r.append(b'')
1452 return r, d
1455 return r, d
1453
1456
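Illustration (not part of this changeset): for glob kinds, the recursive root
computed above is the longest leading run of path components containing no
glob metacharacters. A standalone sketch (hypothetical name):

def globroot(pat):
    root = []
    for p in pat.split(b'/'):
        if b'[' in p or b'{' in p or b'*' in p or b'?' in p:
            break
        root.append(p)
    return b'/'.join(root)

assert globroot(b'src/*.c') == b'src'
assert globroot(b'src/sub/file.c') == b'src/sub/file.c'
assert globroot(b'*.c') == b''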
1454
1457
1455 def _roots(kindpats):
1458 def _roots(kindpats):
1456 '''Returns root directories to match recursively from the given patterns.'''
1459 '''Returns root directories to match recursively from the given patterns.'''
1457 roots, dirs = _patternrootsanddirs(kindpats)
1460 roots, dirs = _patternrootsanddirs(kindpats)
1458 return roots
1461 return roots
1459
1462
1460
1463
1461 def _rootsdirsandparents(kindpats):
1464 def _rootsdirsandparents(kindpats):
1462 '''Returns roots and exact directories from patterns.
1465 '''Returns roots and exact directories from patterns.
1463
1466
1464 `roots` are directories to match recursively, `dirs` should
1467 `roots` are directories to match recursively, `dirs` should
1465 be matched non-recursively, and `parents` are the implicitly required
1468 be matched non-recursively, and `parents` are the implicitly required
1466 directories to walk to items in either roots or dirs.
1469 directories to walk to items in either roots or dirs.
1467
1470
1468 Returns a tuple of (roots, dirs, parents).
1471 Returns a tuple of (roots, dirs, parents).
1469
1472
1470 >>> r = _rootsdirsandparents(
1473 >>> r = _rootsdirsandparents(
1471 ... [(b'glob', b'g/h/*', b''), (b'glob', b'g/h', b''),
1474 ... [(b'glob', b'g/h/*', b''), (b'glob', b'g/h', b''),
1472 ... (b'glob', b'g*', b'')])
1475 ... (b'glob', b'g*', b'')])
1473 >>> print(r[0:2], sorted(r[2])) # the set has an unstable output
1476 >>> print(r[0:2], sorted(r[2])) # the set has an unstable output
1474 (['g/h', 'g/h', ''], []) ['', 'g']
1477 (['g/h', 'g/h', ''], []) ['', 'g']
1475 >>> r = _rootsdirsandparents(
1478 >>> r = _rootsdirsandparents(
1476 ... [(b'rootfilesin', b'g/h', b''), (b'rootfilesin', b'', b'')])
1479 ... [(b'rootfilesin', b'g/h', b''), (b'rootfilesin', b'', b'')])
1477 >>> print(r[0:2], sorted(r[2])) # the set has an unstable output
1480 >>> print(r[0:2], sorted(r[2])) # the set has an unstable output
1478 ([], ['g/h', '']) ['', 'g']
1481 ([], ['g/h', '']) ['', 'g']
1479 >>> r = _rootsdirsandparents(
1482 >>> r = _rootsdirsandparents(
1480 ... [(b'relpath', b'r', b''), (b'path', b'p/p', b''),
1483 ... [(b'relpath', b'r', b''), (b'path', b'p/p', b''),
1481 ... (b'path', b'', b'')])
1484 ... (b'path', b'', b'')])
1482 >>> print(r[0:2], sorted(r[2])) # the set has an unstable output
1485 >>> print(r[0:2], sorted(r[2])) # the set has an unstable output
1483 (['r', 'p/p', ''], []) ['', 'p']
1486 (['r', 'p/p', ''], []) ['', 'p']
1484 >>> r = _rootsdirsandparents(
1487 >>> r = _rootsdirsandparents(
1485 ... [(b'relglob', b'rg*', b''), (b're', b're/', b''),
1488 ... [(b'relglob', b'rg*', b''), (b're', b're/', b''),
1486 ... (b'relre', b'rr', b'')])
1489 ... (b'relre', b'rr', b'')])
1487 >>> print(r[0:2], sorted(r[2])) # the set has an unstable output
1490 >>> print(r[0:2], sorted(r[2])) # the set has an unstable output
1488 (['', '', ''], []) ['']
1491 (['', '', ''], []) ['']
1489 '''
1492 '''
1490 r, d = _patternrootsanddirs(kindpats)
1493 r, d = _patternrootsanddirs(kindpats)
1491
1494
1492 p = set()
1495 p = set()
1493 # Add the parents as non-recursive/exact directories, since they must be
1496 # Add the parents as non-recursive/exact directories, since they must be
1494 # scanned to get to either the roots or the other exact directories.
1497 # scanned to get to either the roots or the other exact directories.
1495 p.update(pathutil.dirs(d))
1498 p.update(pathutil.dirs(d))
1496 p.update(pathutil.dirs(r))
1499 p.update(pathutil.dirs(r))
1497
1500
1498 # FIXME: all uses of this function convert these to sets, do so before
1501 # FIXME: all uses of this function convert these to sets, do so before
1499 # returning.
1502 # returning.
1500 # FIXME: no user of this function needs anything in 'roots' and 'dirs'
1503 # FIXME: no user of this function needs anything in 'roots' and 'dirs'
1501 # to also be in 'parents'; consider removing them before returning.
1504 # to also be in 'parents'; consider removing them before returning.
1502 return r, d, p
1505 return r, d, p
1503
1506
1504
1507
1505 def _explicitfiles(kindpats):
1508 def _explicitfiles(kindpats):
1506 '''Returns the potential explicit filenames from the patterns.
1509 '''Returns the potential explicit filenames from the patterns.
1507
1510
1508 >>> _explicitfiles([(b'path', b'foo/bar', b'')])
1511 >>> _explicitfiles([(b'path', b'foo/bar', b'')])
1509 ['foo/bar']
1512 ['foo/bar']
1510 >>> _explicitfiles([(b'rootfilesin', b'foo/bar', b'')])
1513 >>> _explicitfiles([(b'rootfilesin', b'foo/bar', b'')])
1511 []
1514 []
1512 '''
1515 '''
1513 # Keep only the pattern kinds where one can specify filenames (vs only
1516 # Keep only the pattern kinds where one can specify filenames (vs only
1514 # directory names).
1517 # directory names).
1515 filable = [kp for kp in kindpats if kp[0] not in (b'rootfilesin',)]
1518 filable = [kp for kp in kindpats if kp[0] not in (b'rootfilesin',)]
1516 return _roots(filable)
1519 return _roots(filable)
1517
1520
1518
1521
1519 def _prefix(kindpats):
1522 def _prefix(kindpats):
1520 '''Whether all the patterns match a prefix (i.e. recursively)'''
1523 '''Whether all the patterns match a prefix (i.e. recursively)'''
1521 for kind, pat, source in kindpats:
1524 for kind, pat, source in kindpats:
1522 if kind not in (b'path', b'relpath'):
1525 if kind not in (b'path', b'relpath'):
1523 return False
1526 return False
1524 return True
1527 return True
1525
1528
1526
1529
1527 _commentre = None
1530 _commentre = None
1528
1531
1529
1532
1530 def readpatternfile(filepath, warn, sourceinfo=False):
1533 def readpatternfile(filepath, warn, sourceinfo=False):
1531 '''parse a pattern file, returning a list of
1534 '''parse a pattern file, returning a list of
1532 patterns. These patterns should be given to compile()
1535 patterns. These patterns should be given to compile()
1533 to be validated and converted into a match function.
1536 to be validated and converted into a match function.
1534
1537
1535 trailing white space is dropped.
1538 trailing white space is dropped.
1536 the escape character is backslash.
1539 the escape character is backslash.
1537 comments start with #.
1540 comments start with #.
1538 empty lines are skipped.
1541 empty lines are skipped.
1539
1542
1540 lines can be of the following formats:
1543 lines can be of the following formats:
1541
1544
1542 syntax: regexp # defaults following lines to non-rooted regexps
1545 syntax: regexp # defaults following lines to non-rooted regexps
1543 syntax: glob # defaults following lines to non-rooted globs
1546 syntax: glob # defaults following lines to non-rooted globs
1544 re:pattern # non-rooted regular expression
1547 re:pattern # non-rooted regular expression
1545 glob:pattern # non-rooted glob
1548 glob:pattern # non-rooted glob
1546 rootglob:pat # rooted glob (same root as ^ in regexps)
1549 rootglob:pat # rooted glob (same root as ^ in regexps)
1547 pattern # pattern of the current default type
1550 pattern # pattern of the current default type
1548
1551
1549 if sourceinfo is set, returns a list of tuples:
1552 if sourceinfo is set, returns a list of tuples:
1550 (pattern, lineno, originalline).
1553 (pattern, lineno, originalline).
1551 This is useful to debug ignore patterns.
1554 This is useful to debug ignore patterns.
1552 '''
1555 '''
1553
1556
1554 if rustmod is not None:
1557 if rustmod is not None:
1555 result, warnings = rustmod.read_pattern_file(
1558 result, warnings = rustmod.read_pattern_file(
1556 filepath, bool(warn), sourceinfo,
1559 filepath, bool(warn), sourceinfo,
1557 )
1560 )
1558
1561
1559 for warning_params in warnings:
1562 for warning_params in warnings:
1560 # Can't be easily emitted from Rust, because it would require
1563 # Can't be easily emitted from Rust, because it would require
1561 # a mechanism for both gettext and calling the `warn` function.
1564 # a mechanism for both gettext and calling the `warn` function.
1562 warn(_(b"%s: ignoring invalid syntax '%s'\n") % warning_params)
1565 warn(_(b"%s: ignoring invalid syntax '%s'\n") % warning_params)
1563
1566
1564 return result
1567 return result
1565
1568
1566 syntaxes = {
1569 syntaxes = {
1567 b're': b'relre:',
1570 b're': b'relre:',
1568 b'regexp': b'relre:',
1571 b'regexp': b'relre:',
1569 b'glob': b'relglob:',
1572 b'glob': b'relglob:',
1570 b'rootglob': b'rootglob:',
1573 b'rootglob': b'rootglob:',
1571 b'include': b'include',
1574 b'include': b'include',
1572 b'subinclude': b'subinclude',
1575 b'subinclude': b'subinclude',
1573 }
1576 }
1574 syntax = b'relre:'
1577 syntax = b'relre:'
1575 patterns = []
1578 patterns = []
1576
1579
1577 fp = open(filepath, b'rb')
1580 fp = open(filepath, b'rb')
1578 for lineno, line in enumerate(util.iterfile(fp), start=1):
1581 for lineno, line in enumerate(util.iterfile(fp), start=1):
1579 if b"#" in line:
1582 if b"#" in line:
1580 global _commentre
1583 global _commentre
1581 if not _commentre:
1584 if not _commentre:
1582 _commentre = util.re.compile(br'((?:^|[^\\])(?:\\\\)*)#.*')
1585 _commentre = util.re.compile(br'((?:^|[^\\])(?:\\\\)*)#.*')
1583 # remove comments prefixed by an even number of escapes
1586 # remove comments prefixed by an even number of escapes
1584 m = _commentre.search(line)
1587 m = _commentre.search(line)
1585 if m:
1588 if m:
1586 line = line[: m.end(1)]
1589 line = line[: m.end(1)]
1587 # fixup properly escaped comments that survived the above
1590 # fixup properly escaped comments that survived the above
1588 line = line.replace(b"\\#", b"#")
1591 line = line.replace(b"\\#", b"#")
1589 line = line.rstrip()
1592 line = line.rstrip()
1590 if not line:
1593 if not line:
1591 continue
1594 continue
1592
1595
1593 if line.startswith(b'syntax:'):
1596 if line.startswith(b'syntax:'):
1594 s = line[7:].strip()
1597 s = line[7:].strip()
1595 try:
1598 try:
1596 syntax = syntaxes[s]
1599 syntax = syntaxes[s]
1597 except KeyError:
1600 except KeyError:
1598 if warn:
1601 if warn:
1599 warn(
1602 warn(
1600 _(b"%s: ignoring invalid syntax '%s'\n") % (filepath, s)
1603 _(b"%s: ignoring invalid syntax '%s'\n") % (filepath, s)
1601 )
1604 )
1602 continue
1605 continue
1603
1606
1604 linesyntax = syntax
1607 linesyntax = syntax
1605 for s, rels in pycompat.iteritems(syntaxes):
1608 for s, rels in pycompat.iteritems(syntaxes):
1606 if line.startswith(rels):
1609 if line.startswith(rels):
1607 linesyntax = rels
1610 linesyntax = rels
1608 line = line[len(rels) :]
1611 line = line[len(rels) :]
1609 break
1612 break
1610 elif line.startswith(s + b':'):
1613 elif line.startswith(s + b':'):
1611 linesyntax = rels
1614 linesyntax = rels
1612 line = line[len(s) + 1 :]
1615 line = line[len(s) + 1 :]
1613 break
1616 break
1614 if sourceinfo:
1617 if sourceinfo:
1615 patterns.append((linesyntax + line, lineno, line))
1618 patterns.append((linesyntax + line, lineno, line))
1616 else:
1619 else:
1617 patterns.append(linesyntax + line)
1620 patterns.append(linesyntax + line)
1618 fp.close()
1621 fp.close()
1619 return patterns
1622 return patterns
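For orientation, a small pattern file in the format parsed above might look
like this (an illustrative sample, not part of the changeset); each line is
returned prefixed with the kind implied by the active syntax:

syntax: glob       # following lines default to relglob: patterns
*.o                # returned as relglob:*.o
rootglob:build/*   # an explicit kind prefix overrides the default
syntax: regexp     # switch the default to relre:
\.orig$            # returned as relre:\.orig$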
@@ -1,2052 +1,2052 b''
1 # subrepo.py - sub-repository classes and factory
1 # subrepo.py - sub-repository classes and factory
2 #
2 #
3 # Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
3 # Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import copy
10 import copy
11 import errno
11 import errno
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import re
14 import re
15 import stat
15 import stat
16 import subprocess
16 import subprocess
17 import sys
17 import sys
18 import tarfile
18 import tarfile
19 import xml.dom.minidom
19 import xml.dom.minidom
20
20
21 from .i18n import _
21 from .i18n import _
22 from . import (
22 from . import (
23 cmdutil,
23 cmdutil,
24 encoding,
24 encoding,
25 error,
25 error,
26 exchange,
26 exchange,
27 logcmdutil,
27 logcmdutil,
28 match as matchmod,
28 match as matchmod,
29 node,
29 node,
30 pathutil,
30 pathutil,
31 phases,
31 phases,
32 pycompat,
32 pycompat,
33 scmutil,
33 scmutil,
34 subrepoutil,
34 subrepoutil,
35 util,
35 util,
36 vfs as vfsmod,
36 vfs as vfsmod,
37 )
37 )
38 from .utils import (
38 from .utils import (
39 dateutil,
39 dateutil,
40 procutil,
40 procutil,
41 stringutil,
41 stringutil,
42 )
42 )
43
43
44 hg = None
44 hg = None
45 reporelpath = subrepoutil.reporelpath
45 reporelpath = subrepoutil.reporelpath
46 subrelpath = subrepoutil.subrelpath
46 subrelpath = subrepoutil.subrelpath
47 _abssource = subrepoutil._abssource
47 _abssource = subrepoutil._abssource
48 propertycache = util.propertycache
48 propertycache = util.propertycache
49
49
50
50
51 def _expandedabspath(path):
51 def _expandedabspath(path):
52 '''
52 '''
53 get a path or URL, and if it is a path, expand it and return an absolute path
53 get a path or URL, and if it is a path, expand it and return an absolute path
54 '''
54 '''
55 expandedpath = util.urllocalpath(util.expandpath(path))
55 expandedpath = util.urllocalpath(util.expandpath(path))
56 u = util.url(expandedpath)
56 u = util.url(expandedpath)
57 if not u.scheme:
57 if not u.scheme:
58 path = util.normpath(os.path.abspath(u.path))
58 path = util.normpath(os.path.abspath(u.path))
59 return path
59 return path
60
60
61
61
62 def _getstorehashcachename(remotepath):
62 def _getstorehashcachename(remotepath):
63 '''get a unique filename for the store hash cache of a remote repository'''
63 '''get a unique filename for the store hash cache of a remote repository'''
64 return node.hex(hashlib.sha1(_expandedabspath(remotepath)).digest())[0:12]
64 return node.hex(hashlib.sha1(_expandedabspath(remotepath)).digest())[0:12]
65
65
66
66
class SubrepoAbort(error.Abort):
    """Exception class used to avoid handling a subrepo error more than once"""

    def __init__(self, *args, **kw):
        self.subrepo = kw.pop('subrepo', None)
        self.cause = kw.pop('cause', None)
        error.Abort.__init__(self, *args, **kw)


def annotatesubrepoerror(func):
    def decoratedmethod(self, *args, **kargs):
        try:
            res = func(self, *args, **kargs)
        except SubrepoAbort as ex:
            # This exception has already been handled
            raise ex
        except error.Abort as ex:
            subrepo = subrelpath(self)
            errormsg = (
                stringutil.forcebytestr(ex)
                + b' '
                + _(b'(in subrepository "%s")') % subrepo
            )
            # avoid handling this exception by raising a SubrepoAbort exception
            raise SubrepoAbort(
                errormsg, hint=ex.hint, subrepo=subrepo, cause=sys.exc_info()
            )
        return res

    return decoratedmethod

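[editor's note] `annotatesubrepoerror` is a plain decorator: it wraps a
subrepo method so that any `error.Abort` escaping it is re-raised exactly
once, with the subrepo path appended, and `SubrepoAbort` marks errors that
were already annotated. A minimal standalone sketch of the wrapping pattern
(the names here are hypothetical, not Mercurial API):

    import functools

    def annotate_errors(label):
        def deco(func):
            @functools.wraps(func)
            def wrapper(*args, **kwargs):
                try:
                    return func(*args, **kwargs)
                except RuntimeError as ex:
                    # re-raise once, with context appended
                    raise RuntimeError('%s (in %s)' % (ex, label)) from ex
            return wrapper
        return deco
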

def _updateprompt(ui, sub, dirty, local, remote):
    if dirty:
        msg = _(
            b' subrepository sources for %s differ\n'
            b'you can use (l)ocal source (%s) or (r)emote source (%s).\n'
            b'what do you want to do?'
            b'$$ &Local $$ &Remote'
        ) % (subrelpath(sub), local, remote)
    else:
        msg = _(
            b' subrepository sources for %s differ (in checked out '
            b'version)\n'
            b'you can use (l)ocal source (%s) or (r)emote source (%s).\n'
            b'what do you want to do?'
            b'$$ &Local $$ &Remote'
        ) % (subrelpath(sub), local, remote)
    return ui.promptchoice(msg, 0)

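[editor's note] `ui.promptchoice` takes its choices from the prompt string
itself: each trailing `$$`-separated segment is one option and `&` marks its
response key, so `b'...$$ &Local $$ &Remote'` accepts `l` or `r`. The call
returns the index of the selection (the second argument, 0, is the default),
which is why a truthy return from `_updateprompt` means "use the remote
source". A hedged one-line sketch, assuming a `ui` object is in scope:

    choice = ui.promptchoice(b'keep which side?$$ &Local $$ &Remote', 0)
    # choice == 0 -> Local, choice == 1 -> Remote
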
def _sanitize(ui, vfs, ignore):
    for dirname, dirs, names in vfs.walk():
        for i, d in enumerate(dirs):
            if d.lower() == ignore:
                del dirs[i]
                break
        if vfs.basename(dirname).lower() != b'.hg':
            continue
        for f in names:
            if f.lower() == b'hgrc':
                ui.warn(
                    _(
                        b"warning: removing potentially hostile 'hgrc' "
                        b"in '%s'\n"
                    )
                    % vfs.join(dirname)
                )
                vfs.unlink(vfs.reljoin(dirname, f))

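[editor's note] Deleting an entry from `dirs` while walking prunes that
subtree before the walk descends into it, which is how `_sanitize` skips the
directory named by `ignore`. The same idiom with the standard library (the
path and ignored name are hypothetical):

    import os

    for dirname, dirs, names in os.walk('/tmp/example'):
        # editing `dirs` in place prunes those subtrees from the walk
        dirs[:] = [d for d in dirs if d.lower() != '.git']
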
def _auditsubrepopath(repo, path):
    # sanity check for potentially unsafe paths such as '~' and '$FOO'
    if path.startswith(b'~') or b'$' in path or util.expandpath(path) != path:
        raise error.Abort(
            _(b'subrepo path contains illegal component: %s') % path
        )
    # auditor doesn't check if the path itself is a symlink
    pathutil.pathauditor(repo.root)(path)
    if repo.wvfs.islink(path):
        raise error.Abort(_(b"subrepo '%s' traverses symbolic link") % path)


SUBREPO_ALLOWED_DEFAULTS = {
    b'hg': True,
    b'git': False,
    b'svn': False,
}


def _checktype(ui, kind):
    # subrepos.allowed is a master kill switch. If disabled, subrepos are
    # disabled period.
    if not ui.configbool(b'subrepos', b'allowed', True):
        raise error.Abort(
            _(b'subrepos not enabled'),
            hint=_(b"see 'hg help config.subrepos' for details"),
        )

    default = SUBREPO_ALLOWED_DEFAULTS.get(kind, False)
    if not ui.configbool(b'subrepos', b'%s:allowed' % kind, default):
        raise error.Abort(
            _(b'%s subrepos not allowed') % kind,
            hint=_(b"see 'hg help config.subrepos' for details"),
        )

    if kind not in types:
        raise error.Abort(_(b'unknown subrepo type %s') % kind)

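[editor's note] The checks above read two levels of configuration: the
master switch `subrepos.allowed`, then a per-kind key such as
`subrepos.git:allowed`, falling back to the defaults table (hg on, git and
svn off). A standalone sketch of the same lookup order, with a plain dict
standing in for `ui.configbool`:

    config = {b'subrepos.allowed': True, b'subrepos.git:allowed': True}
    defaults = {b'hg': True, b'git': False, b'svn': False}

    def allowed(kind):
        if not config.get(b'subrepos.allowed', True):
            return False
        return config.get(b'subrepos.%s:allowed' % kind,
                          defaults.get(kind, False))

    assert allowed(b'git') and not allowed(b'svn')
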
def subrepo(ctx, path, allowwdir=False, allowcreate=True):
    """return instance of the right subrepo class for subrepo in path"""
    # subrepo inherently violates our import layering rules
    # because it wants to make repo objects from deep inside the stack
    # so we manually delay the circular imports to not break
    # scripts that don't use our demand-loading
    global hg
    from . import hg as h

    hg = h

    repo = ctx.repo()
    _auditsubrepopath(repo, path)
    state = ctx.substate[path]
    _checktype(repo.ui, state[2])
    if allowwdir:
        state = (state[0], ctx.subrev(path), state[2])
    return types[state[2]](ctx, path, state[:2], allowcreate)

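[editor's note] `ctx.substate` maps each subrepo path to a three-tuple
`(source, revision, kind)`, and the factory dispatches on `kind` through the
`types` table defined later in this module. A hedged usage sketch (`ctx` and
the path are assumed to exist):

    source, revision, kind = ctx.substate[b'libs/vendored']
    sub = subrepo(ctx, b'libs/vendored')  # hgsubrepo, svnsubrepo, ...
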
def nullsubrepo(ctx, path, pctx):
    """return an empty subrepo in pctx for the extant subrepo in ctx"""
    # subrepo inherently violates our import layering rules
    # because it wants to make repo objects from deep inside the stack
    # so we manually delay the circular imports to not break
    # scripts that don't use our demand-loading
    global hg
    from . import hg as h

    hg = h

    repo = ctx.repo()
    _auditsubrepopath(repo, path)
    state = ctx.substate[path]
    _checktype(repo.ui, state[2])
    subrev = b''
    if state[2] == b'hg':
        subrev = b"0" * 40
    return types[state[2]](pctx, path, (state[0], subrev), True)


# subrepo classes need to implement the following abstract class:

class abstractsubrepo(object):
    def __init__(self, ctx, path):
        """Initialize abstractsubrepo part

        ``ctx`` is the context referring this subrepository in the
        parent repository.

        ``path`` is the path to this subrepository as seen from
        innermost repository.
        """
        self.ui = ctx.repo().ui
        self._ctx = ctx
        self._path = path

    def addwebdirpath(self, serverpath, webconf):
        """Add the hgwebdir entries for this subrepo, and any of its subrepos.

        ``serverpath`` is the path component of the URL for this repo.

        ``webconf`` is the dictionary of hgwebdir entries.
        """
        pass

    def storeclean(self, path):
        """
        returns true if the repository has not changed since it was last
        cloned from or pushed to a given repository.
        """
        return False

    def dirty(self, ignoreupdate=False, missing=False):
        """returns true if the dirstate of the subrepo is dirty or does not
        match current stored state. If ignoreupdate is true, only check
        whether the subrepo has uncommitted changes in its dirstate. If missing
        is true, check for deleted files.
        """
        raise NotImplementedError

    def dirtyreason(self, ignoreupdate=False, missing=False):
        """return reason string if it is ``dirty()``

        Returned string should have enough information for the message
        of exception.

        This returns None, otherwise.
        """
        if self.dirty(ignoreupdate=ignoreupdate, missing=missing):
            return _(b'uncommitted changes in subrepository "%s"') % subrelpath(
                self
            )

    def bailifchanged(self, ignoreupdate=False, hint=None):
        """raise Abort if subrepository is ``dirty()``
        """
        dirtyreason = self.dirtyreason(ignoreupdate=ignoreupdate, missing=True)
        if dirtyreason:
            raise error.Abort(dirtyreason, hint=hint)

    def basestate(self):
        """current working directory base state, disregarding .hgsubstate
        state and working directory modifications"""
        raise NotImplementedError

    def checknested(self, path):
        """check if path is a subrepository within this repository"""
        return False

    def commit(self, text, user, date):
        """commit the current changes to the subrepo with the given
        log message. Use given user and date if possible. Return the
        new state of the subrepo.
        """
        raise NotImplementedError

    def phase(self, state):
        """returns phase of specified state in the subrepository.
        """
        return phases.public

    def remove(self):
        """remove the subrepo

        (should verify the dirstate is not dirty first)
        """
        raise NotImplementedError

    def get(self, state, overwrite=False):
        """run whatever commands are needed to put the subrepo into
        this state
        """
        raise NotImplementedError

    def merge(self, state):
        """merge currently-saved state with the new state."""
        raise NotImplementedError

    def push(self, opts):
        """perform whatever action is analogous to 'hg push'

        This may be a no-op on some systems.
        """
        raise NotImplementedError

    def add(self, ui, match, prefix, uipathfn, explicitonly, **opts):
        return []

    def addremove(self, matcher, prefix, uipathfn, opts):
        self.ui.warn(b"%s: %s" % (prefix, _(b"addremove is not supported")))
        return 1

    def cat(self, match, fm, fntemplate, prefix, **opts):
        return 1

    def status(self, rev2, **opts):
        return scmutil.status([], [], [], [], [], [], [])

    def diff(self, ui, diffopts, node2, match, prefix, **opts):
        pass

    def outgoing(self, ui, dest, opts):
        return 1

    def incoming(self, ui, source, opts):
        return 1

    def files(self):
        """return filename iterator"""
        raise NotImplementedError

    def filedata(self, name, decode):
        """return file data, optionally passed through repo decoders"""
        raise NotImplementedError

    def fileflags(self, name):
        """return file flags"""
        return b''

-   def matchfileset(self, expr, badfn=None):
+   def matchfileset(self, cwd, expr, badfn=None):
        """Resolve the fileset expression for this repo"""
        return matchmod.never(badfn=badfn)

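[editor's note] This signature change is the point of the changeset: the
caller now passes down the `cwd` against which a fileset expression should
be resolved, instead of the method silently using the process's current
directory. A hedged usage sketch, assuming `ctx` is a changectx and the
pattern is hypothetical:

    # resolve b'set:**.py' relative to the repo root, wherever hg runs from
    m = ctx.matchfileset(ctx.repo().root, b'set:**.py')
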
    def printfiles(self, ui, m, uipathfn, fm, fmt, subrepos):
        """handle the files command for this subrepo"""
        return 1

    def archive(self, archiver, prefix, match=None, decode=True):
        if match is not None:
            files = [f for f in self.files() if match(f)]
        else:
            files = self.files()
        total = len(files)
        relpath = subrelpath(self)
        progress = self.ui.makeprogress(
            _(b'archiving (%s)') % relpath, unit=_(b'files'), total=total
        )
        progress.update(0)
        for name in files:
            flags = self.fileflags(name)
            mode = b'x' in flags and 0o755 or 0o644
            symlink = b'l' in flags
            archiver.addfile(
                prefix + name, mode, symlink, self.filedata(name, decode)
            )
            progress.increment()
        progress.complete()
        return total

    def walk(self, match):
        '''
        walk recursively through the directory tree, finding all files
        matched by the match function
        '''

    def forget(self, match, prefix, uipathfn, dryrun, interactive):
        return ([], [])

    def removefiles(
        self,
        matcher,
        prefix,
        uipathfn,
        after,
        force,
        subrepos,
        dryrun,
        warnings,
    ):
        """remove the matched files from the subrepository and the filesystem,
        possibly by force and/or after the file has been removed from the
        filesystem. Return 0 on success, 1 on any warning.
        """
        warnings.append(
            _(b"warning: removefiles not implemented (%s)") % self._path
        )
        return 1

    def revert(self, substate, *pats, **opts):
        self.ui.warn(
            _(b'%s: reverting %s subrepos is unsupported\n')
            % (substate[0], substate[2])
        )
        return []

    def shortid(self, revid):
        return revid

    def unshare(self):
        '''
        convert this repository from shared to normal storage.
        '''

    def verify(self, onpush=False):
        """verify the revision of this repository that is held in `_state` is
        present and not hidden. Return 0 on success or warning, 1 on any
        error. In the case of ``onpush``, warnings or errors will raise an
        exception if the result of pushing would be a broken remote repository.
        """
        return 0

    @propertycache
    def wvfs(self):
        """return vfs to access the working directory of this subrepository
        """
        return vfsmod.vfs(self._ctx.repo().wvfs.join(self._path))

    @propertycache
    def _relpath(self):
        """return path to this subrepository as seen from outermost repository
        """
        return self.wvfs.reljoin(reporelpath(self._ctx.repo()), self._path)

class hgsubrepo(abstractsubrepo):
    def __init__(self, ctx, path, state, allowcreate):
        super(hgsubrepo, self).__init__(ctx, path)
        self._state = state
        r = ctx.repo()
        root = r.wjoin(util.localpath(path))
        create = allowcreate and not r.wvfs.exists(b'%s/.hg' % path)
        # repository constructor does expand variables in path, which is
        # unsafe since subrepo path might come from untrusted source.
        if os.path.realpath(util.expandpath(root)) != root:
            raise error.Abort(
                _(b'subrepo path contains illegal component: %s') % path
            )
        self._repo = hg.repository(r.baseui, root, create=create)
        if self._repo.root != root:
            raise error.ProgrammingError(
                b'failed to reject unsafe subrepo '
                b'path: %s (expanded to %s)' % (root, self._repo.root)
            )

        # Propagate the parent's --hidden option
        if r is r.unfiltered():
            self._repo = self._repo.unfiltered()

        self.ui = self._repo.ui
        for s, k in [(b'ui', b'commitsubrepos')]:
            v = r.ui.config(s, k)
            if v:
                self.ui.setconfig(s, k, v, b'subrepo')
        # internal config: ui._usedassubrepo
        self.ui.setconfig(b'ui', b'_usedassubrepo', b'True', b'subrepo')
        self._initrepo(r, state[0], create)

    @annotatesubrepoerror
    def addwebdirpath(self, serverpath, webconf):
        cmdutil.addwebdirpath(self._repo, subrelpath(self), webconf)

    def storeclean(self, path):
        with self._repo.lock():
            return self._storeclean(path)

    def _storeclean(self, path):
        clean = True
        itercache = self._calcstorehash(path)
        for filehash in self._readstorehashcache(path):
            if filehash != next(itercache, None):
                clean = False
                break
        if clean:
            # if not empty:
            # the cached and current pull states have a different size
            clean = next(itercache, None) is None
        return clean

    def _calcstorehash(self, remotepath):
        '''calculate a unique "store hash"

        This method is used to detect when there are changes that may
        require a push to a given remote path.'''
        # sort the files that will be hashed in increasing (likely) file size
        filelist = (b'bookmarks', b'store/phaseroots', b'store/00changelog.i')
        yield b'# %s\n' % _expandedabspath(remotepath)
        vfs = self._repo.vfs
        for relname in filelist:
            filehash = node.hex(hashlib.sha1(vfs.tryread(relname)).digest())
            yield b'%s = %s\n' % (relname, filehash)

    @propertycache
    def _cachestorehashvfs(self):
        return vfsmod.vfs(self._repo.vfs.join(b'cache/storehash'))

    def _readstorehashcache(self, remotepath):
        '''read the store hash cache for a given remote repository'''
        cachefile = _getstorehashcachename(remotepath)
        return self._cachestorehashvfs.tryreadlines(cachefile, b'r')

    def _cachestorehash(self, remotepath):
        '''cache the current store hash

        Each remote repo requires its own store hash cache, because a subrepo
        store may be "clean" versus a given remote repo, but not versus another
        '''
        cachefile = _getstorehashcachename(remotepath)
        with self._repo.lock():
            storehash = list(self._calcstorehash(remotepath))
            vfs = self._cachestorehashvfs
            vfs.writelines(cachefile, storehash, mode=b'wb', notindexed=True)

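[editor's note] `_storeclean` compares the cached hash lines against a
freshly computed generator element by element, then checks that the
generator is exhausted so a shorter cache cannot pass as equal. The
comparison idiom in isolation:

    def streams_equal(cached, current):
        it = iter(current)
        for line in cached:
            if line != next(it, None):
                return False
        # same prefix; also require identical length
        return next(it, None) is None

    assert streams_equal([1, 2], [1, 2])
    assert not streams_equal([1, 2], [1, 2, 3])
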
    def _getctx(self):
        '''fetch the context for this subrepo revision, possibly a workingctx
        '''
        if self._ctx.rev() is None:
            return self._repo[None]  # workingctx if parent is workingctx
        else:
            rev = self._state[1]
            return self._repo[rev]

    @annotatesubrepoerror
    def _initrepo(self, parentrepo, source, create):
        self._repo._subparent = parentrepo
        self._repo._subsource = source

        if create:
            lines = [b'[paths]\n']

            def addpathconfig(key, value):
                if value:
                    lines.append(b'%s = %s\n' % (key, value))
                    self.ui.setconfig(b'paths', key, value, b'subrepo')

            defpath = _abssource(self._repo, abort=False)
            defpushpath = _abssource(self._repo, True, abort=False)
            addpathconfig(b'default', defpath)
            if defpath != defpushpath:
                addpathconfig(b'default-push', defpushpath)

            self._repo.vfs.write(b'hgrc', util.tonativeeol(b''.join(lines)))

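[editor's note] On creation, `_initrepo` seeds the new subrepo's `.hg/hgrc`
with a `[paths]` section so later pulls and pushes resolve without help from
the parent. With hypothetical URLs, the generated file would look roughly
like this (shown as comments; illustrative only):

    # [paths]
    # default = https://example.org/parent/sub
    # default-push = ssh://example.org/parent/sub
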
    @annotatesubrepoerror
    def add(self, ui, match, prefix, uipathfn, explicitonly, **opts):
        return cmdutil.add(
            ui, self._repo, match, prefix, uipathfn, explicitonly, **opts
        )

    @annotatesubrepoerror
    def addremove(self, m, prefix, uipathfn, opts):
        # In the same way as sub directories are processed, once in a subrepo,
        # always enter any of its subrepos. Don't corrupt the options that will
        # be used to process sibling subrepos however.
        opts = copy.copy(opts)
        opts[b'subrepos'] = True
        return scmutil.addremove(self._repo, m, prefix, uipathfn, opts)

    @annotatesubrepoerror
    def cat(self, match, fm, fntemplate, prefix, **opts):
        rev = self._state[1]
        ctx = self._repo[rev]
        return cmdutil.cat(
            self.ui, self._repo, ctx, match, fm, fntemplate, prefix, **opts
        )

    @annotatesubrepoerror
    def status(self, rev2, **opts):
        try:
            rev1 = self._state[1]
            ctx1 = self._repo[rev1]
            ctx2 = self._repo[rev2]
            return self._repo.status(ctx1, ctx2, **opts)
        except error.RepoLookupError as inst:
            self.ui.warn(
                _(b'warning: error "%s" in subrepository "%s"\n')
                % (inst, subrelpath(self))
            )
            return scmutil.status([], [], [], [], [], [], [])

    @annotatesubrepoerror
    def diff(self, ui, diffopts, node2, match, prefix, **opts):
        try:
            node1 = node.bin(self._state[1])
            # We currently expect node2 to come from substate and be
            # in hex format
            if node2 is not None:
                node2 = node.bin(node2)
            logcmdutil.diffordiffstat(
                ui,
                self._repo,
                diffopts,
                node1,
                node2,
                match,
                prefix=prefix,
                listsubrepos=True,
                **opts
            )
        except error.RepoLookupError as inst:
            self.ui.warn(
                _(b'warning: error "%s" in subrepository "%s"\n')
                % (inst, subrelpath(self))
            )

    @annotatesubrepoerror
    def archive(self, archiver, prefix, match=None, decode=True):
        self._get(self._state + (b'hg',))
        files = self.files()
        if match:
            files = [f for f in files if match(f)]
        rev = self._state[1]
        ctx = self._repo[rev]
        scmutil.prefetchfiles(
            self._repo, [ctx.rev()], scmutil.matchfiles(self._repo, files)
        )
        total = abstractsubrepo.archive(self, archiver, prefix, match)
        for subpath in ctx.substate:
            s = subrepo(ctx, subpath, True)
            submatch = matchmod.subdirmatcher(subpath, match)
            subprefix = prefix + subpath + b'/'
            total += s.archive(archiver, subprefix, submatch, decode)
        return total

    @annotatesubrepoerror
    def dirty(self, ignoreupdate=False, missing=False):
        r = self._state[1]
        if r == b'' and not ignoreupdate:  # no state recorded
            return True
        w = self._repo[None]
        if r != w.p1().hex() and not ignoreupdate:
            # different version checked out
            return True
        return w.dirty(missing=missing)  # working directory changed

    def basestate(self):
        return self._repo[b'.'].hex()

    def checknested(self, path):
        return self._repo._checknested(self._repo.wjoin(path))

    @annotatesubrepoerror
    def commit(self, text, user, date):
        # don't bother committing in the subrepo if it's only been
        # updated
        if not self.dirty(True):
            return self._repo[b'.'].hex()
        self.ui.debug(b"committing subrepo %s\n" % subrelpath(self))
        n = self._repo.commit(text, user, date)
        if not n:
            return self._repo[b'.'].hex()  # different version checked out
        return node.hex(n)

    @annotatesubrepoerror
    def phase(self, state):
        return self._repo[state or b'.'].phase()

    @annotatesubrepoerror
    def remove(self):
        # we can't fully delete the repository as it may contain
        # local-only history
        self.ui.note(_(b'removing subrepo %s\n') % subrelpath(self))
        hg.clean(self._repo, node.nullid, False)

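[editor's note] `dirty()` layers two checks: the recorded state must match
the working directory's first parent, and the working directory itself must
have no local modifications. Reduced to a sketch (all three arguments are
hypothetical stand-ins):

    def is_dirty(recorded_rev, wdir_p1, wdir_modified):
        if recorded_rev == b'':
            return True  # no state recorded yet
        if recorded_rev != wdir_p1:
            return True  # different version checked out
        return wdir_modified  # local edits in the subrepo
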
    def _get(self, state):
        source, revision, kind = state
        parentrepo = self._repo._subparent

        if revision in self._repo.unfiltered():
            # Allow shared subrepos tracked at null to setup the sharedpath
            if len(self._repo) != 0 or not parentrepo.shared():
                return True
        self._repo._subsource = source
        srcurl = _abssource(self._repo)

        # Defer creating the peer until after the status message is logged, in
        # case there are network problems.
        getpeer = lambda: hg.peer(self._repo, {}, srcurl)

        if len(self._repo) == 0:
            # use self._repo.vfs instead of self.wvfs to remove .hg only
            self._repo.vfs.rmtree()

            # A remote subrepo could be shared if there is a local copy
            # relative to the parent's share source. But clone pooling doesn't
            # assemble the repos in a tree, so that can't be consistently done.
            # A simpler option is for the user to configure clone pooling, and
            # work with that.
            if parentrepo.shared() and hg.islocal(srcurl):
                self.ui.status(
                    _(b'sharing subrepo %s from %s\n')
                    % (subrelpath(self), srcurl)
                )
                shared = hg.share(
                    self._repo._subparent.baseui,
                    getpeer(),
                    self._repo.root,
                    update=False,
                    bookmarks=False,
                )
                self._repo = shared.local()
            else:
                # TODO: find a common place for this and this code in the
                # share.py wrap of the clone command.
                if parentrepo.shared():
                    pool = self.ui.config(b'share', b'pool')
                    if pool:
                        pool = util.expandpath(pool)

                    shareopts = {
                        b'pool': pool,
                        b'mode': self.ui.config(b'share', b'poolnaming'),
                    }
                else:
                    shareopts = {}

                self.ui.status(
                    _(b'cloning subrepo %s from %s\n')
                    % (subrelpath(self), util.hidepassword(srcurl))
                )
                other, cloned = hg.clone(
                    self._repo._subparent.baseui,
                    {},
                    getpeer(),
                    self._repo.root,
                    update=False,
                    shareopts=shareopts,
                )
                self._repo = cloned.local()
            self._initrepo(parentrepo, source, create=True)
            self._cachestorehash(srcurl)
        else:
            self.ui.status(
                _(b'pulling subrepo %s from %s\n')
                % (subrelpath(self), util.hidepassword(srcurl))
            )
            cleansub = self.storeclean(srcurl)
            exchange.pull(self._repo, getpeer())
            if cleansub:
                # keep the repo clean after pull
                self._cachestorehash(srcurl)
        return False

    @annotatesubrepoerror
    def get(self, state, overwrite=False):
        inrepo = self._get(state)
        source, revision, kind = state
        repo = self._repo
        repo.ui.debug(b"getting subrepo %s\n" % self._path)
        if inrepo:
            urepo = repo.unfiltered()
            ctx = urepo[revision]
            if ctx.hidden():
                urepo.ui.warn(
                    _(b'revision %s in subrepository "%s" is hidden\n')
                    % (revision[0:12], self._path)
                )
            repo = urepo
        hg.updaterepo(repo, revision, overwrite)

    @annotatesubrepoerror
    def merge(self, state):
        self._get(state)
        cur = self._repo[b'.']
        dst = self._repo[state[1]]
        anc = dst.ancestor(cur)

        def mergefunc():
            if anc == cur and dst.branch() == cur.branch():
                self.ui.debug(
                    b'updating subrepository "%s"\n' % subrelpath(self)
                )
                hg.update(self._repo, state[1])
            elif anc == dst:
                self.ui.debug(
                    b'skipping subrepository "%s"\n' % subrelpath(self)
                )
            else:
                self.ui.debug(
                    b'merging subrepository "%s"\n' % subrelpath(self)
                )
                hg.merge(self._repo, state[1], remind=False)

        wctx = self._repo[None]
        if self.dirty():
            if anc != dst:
                if _updateprompt(self.ui, self, wctx.dirty(), cur, dst):
                    mergefunc()
            else:
                mergefunc()
        else:
            mergefunc()

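[editor's note] `mergefunc` chooses among three cases using the common
ancestor: if the target descends from the checkout on the same branch it
fast-forwards with an update, if the checkout already contains the target it
skips, and otherwise it performs a real merge. The same decision table in
isolation:

    def plan(anc, cur, dst, same_branch):
        if anc == cur and same_branch:
            return 'update'  # fast-forward
        if anc == dst:
            return 'skip'    # already contains the target
        return 'merge'
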
    @annotatesubrepoerror
    def push(self, opts):
        force = opts.get(b'force')
        newbranch = opts.get(b'new_branch')
        ssh = opts.get(b'ssh')

        # push subrepos depth-first for coherent ordering
        c = self._repo[b'.']
        subs = c.substate  # only repos that are committed
        for s in sorted(subs):
            if c.sub(s).push(opts) == 0:
                return False

        dsturl = _abssource(self._repo, True)
        if not force:
            if self.storeclean(dsturl):
                self.ui.status(
                    _(b'no changes made to subrepo %s since last push to %s\n')
                    % (subrelpath(self), util.hidepassword(dsturl))
                )
                return None
        self.ui.status(
            _(b'pushing subrepo %s to %s\n')
            % (subrelpath(self), util.hidepassword(dsturl))
        )
        other = hg.peer(self._repo, {b'ssh': ssh}, dsturl)
        res = exchange.push(self._repo, other, force, newbranch=newbranch)

        # the repo is now clean
        self._cachestorehash(dsturl)
        return res.cgresult

    @annotatesubrepoerror
    def outgoing(self, ui, dest, opts):
        if b'rev' in opts or b'branch' in opts:
            opts = copy.copy(opts)
            opts.pop(b'rev', None)
            opts.pop(b'branch', None)
        return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)

    @annotatesubrepoerror
    def incoming(self, ui, source, opts):
        if b'rev' in opts or b'branch' in opts:
            opts = copy.copy(opts)
            opts.pop(b'rev', None)
            opts.pop(b'branch', None)
        return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts)

    @annotatesubrepoerror
    def files(self):
        rev = self._state[1]
        ctx = self._repo[rev]
        return ctx.manifest().keys()

    def filedata(self, name, decode):
        rev = self._state[1]
        data = self._repo[rev][name].data()
        if decode:
            data = self._repo.wwritedata(name, data)
        return data

    def fileflags(self, name):
        rev = self._state[1]
        ctx = self._repo[rev]
        return ctx.flags(name)

    @annotatesubrepoerror
    def printfiles(self, ui, m, uipathfn, fm, fmt, subrepos):
        # If the parent context is a workingctx, use the workingctx here for
        # consistency.
        if self._ctx.rev() is None:
            ctx = self._repo[None]
        else:
            rev = self._state[1]
            ctx = self._repo[rev]
        return cmdutil.files(ui, ctx, m, uipathfn, fm, fmt, subrepos)

    @annotatesubrepoerror
-   def matchfileset(self, expr, badfn=None):
+   def matchfileset(self, cwd, expr, badfn=None):
        if self._ctx.rev() is None:
            ctx = self._repo[None]
        else:
            rev = self._state[1]
            ctx = self._repo[rev]

-       matchers = [ctx.matchfileset(expr, badfn=badfn)]
+       matchers = [ctx.matchfileset(cwd, expr, badfn=badfn)]

        for subpath in ctx.substate:
            sub = ctx.sub(subpath)

            try:
-               sm = sub.matchfileset(expr, badfn=badfn)
+               sm = sub.matchfileset(cwd, expr, badfn=badfn)
                pm = matchmod.prefixdirmatcher(subpath, sm, badfn=badfn)
                matchers.append(pm)
            except error.LookupError:
                self.ui.status(
                    _(b"skipping missing subrepository: %s\n")
                    % self.wvfs.reljoin(reporelpath(self), subpath)
                )
        if len(matchers) == 1:
            return matchers[0]
        return matchmod.unionmatcher(matchers)

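[editor's note] The override above builds one matcher per nesting level: the
repo's own fileset matcher, plus each subrepo's matcher re-rooted under its
path, all combined into a union. With this change the single `cwd` is
threaded unchanged through every level, so relative fileset patterns keep
their meaning no matter how deeply the subrepos nest. A commented sketch of
the composition for one nested subrepo named b'sub' (illustrative only):

    # outer = ctx.matchfileset(cwd, expr)
    # inner = sub.matchfileset(cwd, expr)  # recurses with the same cwd
    # m = matchmod.unionmatcher(
    #     [outer, matchmod.prefixdirmatcher(b'sub', inner)]
    # )
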
    def walk(self, match):
        ctx = self._repo[None]
        return ctx.walk(match)

    @annotatesubrepoerror
    def forget(self, match, prefix, uipathfn, dryrun, interactive):
        return cmdutil.forget(
            self.ui,
            self._repo,
            match,
            prefix,
            uipathfn,
            True,
            dryrun=dryrun,
            interactive=interactive,
        )

    @annotatesubrepoerror
    def removefiles(
        self,
        matcher,
        prefix,
        uipathfn,
        after,
        force,
        subrepos,
        dryrun,
        warnings,
    ):
        return cmdutil.remove(
            self.ui,
            self._repo,
            matcher,
            prefix,
            uipathfn,
            after,
            force,
            subrepos,
            dryrun,
        )

    @annotatesubrepoerror
    def revert(self, substate, *pats, **opts):
        # reverting a subrepo is a 2 step process:
        # 1. if no_backup is not set, revert all modified
        #    files inside the subrepo
        # 2. update the subrepo to the revision specified in
        #    the corresponding substate dictionary
        self.ui.status(_(b'reverting subrepo %s\n') % substate[0])
        if not opts.get('no_backup'):
            # Revert all files on the subrepo, creating backups
            # Note that this will not recursively revert subrepos
            # We could do it if there was a set:subrepos() predicate
            opts = opts.copy()
            opts['date'] = None
            opts['rev'] = substate[1]

            self.filerevert(*pats, **opts)

        # Update the repo to the revision specified in the given substate
        if not opts.get('dry_run'):
            self.get(substate, overwrite=True)

    def filerevert(self, *pats, **opts):
        ctx = self._repo[opts['rev']]
        parents = self._repo.dirstate.parents()
        if opts.get('all'):
            pats = [b'set:modified()']
        else:
            pats = []
        cmdutil.revert(self.ui, self._repo, ctx, parents, *pats, **opts)

    def shortid(self, revid):
        return revid[:12]

    @annotatesubrepoerror
    def unshare(self):
        # subrepo inherently violates our import layering rules
        # because it wants to make repo objects from deep inside the stack
        # so we manually delay the circular imports to not break
        # scripts that don't use our demand-loading
        global hg
        from . import hg as h

        hg = h

        # Nothing prevents a user from sharing in a repo, and then making that a
        # subrepo. Alternately, the previous unshare attempt may have failed
        # part way through. So recurse whether or not this layer is shared.
        if self._repo.shared():
            self.ui.status(_(b"unsharing subrepo '%s'\n") % self._relpath)

        hg.unshare(self.ui, self._repo)

    def verify(self, onpush=False):
        try:
            rev = self._state[1]
            ctx = self._repo.unfiltered()[rev]
            if ctx.hidden():
                # Since hidden revisions aren't pushed/pulled, it seems worth an
                # explicit warning.
                msg = _(b"subrepo '%s' is hidden in revision %s") % (
                    self._relpath,
                    node.short(self._ctx.node()),
                )

                if onpush:
                    raise error.Abort(msg)
                else:
                    self._repo.ui.warn(b'%s\n' % msg)
            return 0
        except error.RepoLookupError:
            # A missing subrepo revision may be a case of needing to pull it, so
            # don't treat this as an error for `hg verify`.
            msg = _(b"subrepo '%s' not found in revision %s") % (
                self._relpath,
                node.short(self._ctx.node()),
            )

            if onpush:
                raise error.Abort(msg)
            else:
                self._repo.ui.warn(b'%s\n' % msg)
            return 0

1049 @propertycache
1049 @propertycache
1050 def wvfs(self):
1050 def wvfs(self):
1051 """return own wvfs for efficiency and consistency
1051 """return own wvfs for efficiency and consistency
1052 """
1052 """
1053 return self._repo.wvfs
1053 return self._repo.wvfs
1054
1054
1055 @propertycache
1055 @propertycache
1056 def _relpath(self):
1056 def _relpath(self):
1057 """return path to this subrepository as seen from outermost repository
1057 """return path to this subrepository as seen from outermost repository
1058 """
1058 """
1059 # Keep consistent dir separators by avoiding vfs.join(self._path)
1059 # Keep consistent dir separators by avoiding vfs.join(self._path)
1060 return reporelpath(self._repo)
1060 return reporelpath(self._repo)
1061
1061
1062
1062
1063 class svnsubrepo(abstractsubrepo):
1063 class svnsubrepo(abstractsubrepo):
1064 def __init__(self, ctx, path, state, allowcreate):
1064 def __init__(self, ctx, path, state, allowcreate):
1065 super(svnsubrepo, self).__init__(ctx, path)
1065 super(svnsubrepo, self).__init__(ctx, path)
1066 self._state = state
1066 self._state = state
1067 self._exe = procutil.findexe(b'svn')
1067 self._exe = procutil.findexe(b'svn')
1068 if not self._exe:
1068 if not self._exe:
1069 raise error.Abort(
1069 raise error.Abort(
1070 _(b"'svn' executable not found for subrepo '%s'") % self._path
1070 _(b"'svn' executable not found for subrepo '%s'") % self._path
1071 )
1071 )
1072
1072
1073 def _svncommand(self, commands, filename=b'', failok=False):
1073 def _svncommand(self, commands, filename=b'', failok=False):
1074 cmd = [self._exe]
1074 cmd = [self._exe]
1075 extrakw = {}
1075 extrakw = {}
1076 if not self.ui.interactive():
1076 if not self.ui.interactive():
1077 # Making stdin be a pipe should prevent svn from behaving
1077 # Making stdin be a pipe should prevent svn from behaving
1078 # interactively even if we can't pass --non-interactive.
1078 # interactively even if we can't pass --non-interactive.
1079 extrakw['stdin'] = subprocess.PIPE
1079 extrakw['stdin'] = subprocess.PIPE
1080 # Starting in svn 1.5 --non-interactive is a global flag
1080 # Starting in svn 1.5 --non-interactive is a global flag
1081 # instead of being per-command, but we need to support 1.4 so
1081 # instead of being per-command, but we need to support 1.4 so
1082 # we have to be intelligent about what commands take
1082 # we have to be intelligent about what commands take
1083 # --non-interactive.
1083 # --non-interactive.
1084 if commands[0] in (b'update', b'checkout', b'commit'):
1084 if commands[0] in (b'update', b'checkout', b'commit'):
1085 cmd.append(b'--non-interactive')
1085 cmd.append(b'--non-interactive')
1086 cmd.extend(commands)
1086 cmd.extend(commands)
1087 if filename is not None:
1087 if filename is not None:
1088 path = self.wvfs.reljoin(
1088 path = self.wvfs.reljoin(
1089 self._ctx.repo().origroot, self._path, filename
1089 self._ctx.repo().origroot, self._path, filename
1090 )
1090 )
1091 cmd.append(path)
1091 cmd.append(path)
1092 env = dict(encoding.environ)
1092 env = dict(encoding.environ)
1093 # Avoid localized output, preserve current locale for everything else.
1093 # Avoid localized output, preserve current locale for everything else.
1094 lc_all = env.get(b'LC_ALL')
1094 lc_all = env.get(b'LC_ALL')
1095 if lc_all:
1095 if lc_all:
1096 env[b'LANG'] = lc_all
1096 env[b'LANG'] = lc_all
1097 del env[b'LC_ALL']
1097 del env[b'LC_ALL']
1098 env[b'LC_MESSAGES'] = b'C'
1098 env[b'LC_MESSAGES'] = b'C'
1099 p = subprocess.Popen(
1099 p = subprocess.Popen(
1100 pycompat.rapply(procutil.tonativestr, cmd),
1100 pycompat.rapply(procutil.tonativestr, cmd),
1101 bufsize=-1,
1101 bufsize=-1,
1102 close_fds=procutil.closefds,
1102 close_fds=procutil.closefds,
1103 stdout=subprocess.PIPE,
1103 stdout=subprocess.PIPE,
1104 stderr=subprocess.PIPE,
1104 stderr=subprocess.PIPE,
1105 env=procutil.tonativeenv(env),
1105 env=procutil.tonativeenv(env),
1106 **extrakw
1106 **extrakw
1107 )
1107 )
1108 stdout, stderr = map(util.fromnativeeol, p.communicate())
1108 stdout, stderr = map(util.fromnativeeol, p.communicate())
1109 stderr = stderr.strip()
1109 stderr = stderr.strip()
1110 if not failok:
1110 if not failok:
1111 if p.returncode:
1111 if p.returncode:
1112 raise error.Abort(
1112 raise error.Abort(
1113 stderr or b'exited with code %d' % p.returncode
1113 stderr or b'exited with code %d' % p.returncode
1114 )
1114 )
1115 if stderr:
1115 if stderr:
1116 self.ui.warn(stderr + b'\n')
1116 self.ui.warn(stderr + b'\n')
1117 return stdout, stderr
1117 return stdout, stderr
1118
1118
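    # A minimal usage sketch of the helper above (with hypothetical values:
    # the svn binary found at b'/usr/bin/svn', self._path == b'vendor', and
    # a non-interactive ui):
    #
    #   self._svncommand([b'update', b'-r', b'10'])
    #
    # would run roughly
    #
    #   /usr/bin/svn --non-interactive update -r 10 <origroot>/vendor
    #
    # with LC_MESSAGES=C in the environment and stdin connected to a pipe,
    # returning the process's (stdout, stderr) as bytes.
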
    @propertycache
    def _svnversion(self):
        output, err = self._svncommand(
            [b'--version', b'--quiet'], filename=None
        )
        m = re.search(br'^(\d+)\.(\d+)', output)
        if not m:
            raise error.Abort(_(b'cannot retrieve svn tool version'))
        return (int(m.group(1)), int(m.group(2)))

    def _svnmissing(self):
        return not self.wvfs.exists(b'.svn')

    def _wcrevs(self):
        # Get the working directory revision as well as the last
        # commit revision so we can compare the subrepo state with
        # both. We used to store the working directory one.
        output, err = self._svncommand([b'info', b'--xml'])
        doc = xml.dom.minidom.parseString(output)
        entries = doc.getElementsByTagName('entry')
        lastrev, rev = b'0', b'0'
        if entries:
            rev = pycompat.bytestr(entries[0].getAttribute('revision')) or b'0'
            commits = entries[0].getElementsByTagName('commit')
            if commits:
                lastrev = (
                    pycompat.bytestr(commits[0].getAttribute('revision'))
                    or b'0'
                )
        return (lastrev, rev)

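    # For reference, a trimmed (hypothetical) 'svn info --xml' payload that
    # _wcrevs() parses:
    #
    #   <info>
    #     <entry kind="dir" path="." revision="42">
    #       <commit revision="40"/>
    #     </entry>
    #   </info>
    #
    # would yield (lastrev, rev) == (b'40', b'42'); with no <entry> at all,
    # both values default to b'0'.
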
    def _wcrev(self):
        return self._wcrevs()[0]

    def _wcchanged(self):
        """Return (changes, extchanges, missing) where changes is True
        if the working directory was changed, extchanges is
        True if any of these changes concern an external entry and missing
        is True if any change is a missing entry.
        """
        output, err = self._svncommand([b'status', b'--xml'])
        externals, changes, missing = [], [], []
        doc = xml.dom.minidom.parseString(output)
        for e in doc.getElementsByTagName('entry'):
            s = e.getElementsByTagName('wc-status')
            if not s:
                continue
            item = s[0].getAttribute('item')
            props = s[0].getAttribute('props')
            path = e.getAttribute('path').encode('utf8')
            if item == 'external':
                externals.append(path)
            elif item == 'missing':
                missing.append(path)
            if item not in (
                '',
                'normal',
                'unversioned',
                'external',
            ) or props not in ('', 'none', 'normal'):
                changes.append(path)
        for path in changes:
            for ext in externals:
                if path == ext or path.startswith(ext + pycompat.ossep):
                    return True, True, bool(missing)
        return bool(changes), False, bool(missing)

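    # A hypothetical 'svn status --xml' entry that _wcchanged() counts as a
    # content change:
    #
    #   <entry path="vendor/foo.c">
    #     <wc-status item="modified" props="none"/>
    #   </entry>
    #
    # Only item values of '', 'normal', 'unversioned' and 'external' (with
    # props of '', 'none' or 'normal') are treated as clean.
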
    @annotatesubrepoerror
    def dirty(self, ignoreupdate=False, missing=False):
        if self._svnmissing():
            return self._state[1] != b''
        wcchanged = self._wcchanged()
        changed = wcchanged[0] or (missing and wcchanged[2])
        if not changed:
            if self._state[1] in self._wcrevs() or ignoreupdate:
                return False
        return True

    def basestate(self):
        lastrev, rev = self._wcrevs()
        if lastrev != rev:
            # Last committed rev is not the same as rev. We would
            # like to take lastrev, but we do not know if the subrepo
            # URL exists at lastrev. Test it and fall back to rev if
            # it is not there.
            try:
                self._svncommand(
                    [b'list', b'%s@%s' % (self._state[0], lastrev)]
                )
                return lastrev
            except error.Abort:
                pass
        return rev

    @annotatesubrepoerror
    def commit(self, text, user, date):
        # user and date are out of our hands since svn is centralized
        changed, extchanged, missing = self._wcchanged()
        if not changed:
            return self.basestate()
        if extchanged:
            # Do not try to commit externals
            raise error.Abort(_(b'cannot commit svn externals'))
        if missing:
            # svn can commit with missing entries but aborting like hg
            # seems a better approach.
            raise error.Abort(_(b'cannot commit missing svn entries'))
        commitinfo, err = self._svncommand([b'commit', b'-m', text])
        self.ui.status(commitinfo)
        newrev = re.search(b'Committed revision ([0-9]+).', commitinfo)
        if not newrev:
            if not commitinfo.strip():
                # Sometimes, our definition of "changed" differs from
                # svn's. For instance, svn ignores missing files
                # when committing. If there are only missing files, no
                # commit is made, no output and no error code.
                raise error.Abort(_(b'failed to commit svn changes'))
            raise error.Abort(commitinfo.splitlines()[-1])
        newrev = newrev.groups()[0]
        self.ui.status(self._svncommand([b'update', b'-r', newrev])[0])
        return newrev

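    # Example of the output commit() parses (revision number hypothetical):
    # a successful 'svn commit' ends with a line like
    #
    #   Committed revision 43.
    #
    # from which the regex above extracts b'43'; the working copy is then
    # updated to that revision so _wcrevs() matches the committed state.
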
    @annotatesubrepoerror
    def remove(self):
        if self.dirty():
            self.ui.warn(
                _(b'not removing repo %s because it has changes.\n')
                % self._path
            )
            return
        self.ui.note(_(b'removing subrepo %s\n') % self._path)

        self.wvfs.rmtree(forcibly=True)
        try:
            pwvfs = self._ctx.repo().wvfs
            pwvfs.removedirs(pwvfs.dirname(self._path))
        except OSError:
            pass

    @annotatesubrepoerror
    def get(self, state, overwrite=False):
        if overwrite:
            self._svncommand([b'revert', b'--recursive'])
        args = [b'checkout']
        if self._svnversion >= (1, 5):
            args.append(b'--force')
        # The revision must be specified at the end of the URL to properly
        # update to a directory which has since been deleted and recreated.
        args.append(b'%s@%s' % (state[0], state[1]))

        # SEC: check that the ssh url is safe
        util.checksafessh(state[0])

        status, err = self._svncommand(args, failok=True)
        _sanitize(self.ui, self.wvfs, b'.svn')
        if not re.search(b'Checked out revision [0-9]+.', status):
            if b'is already a working copy for a different URL' in err and (
                self._wcchanged()[:2] == (False, False)
            ):
                # obstructed but clean working copy, so just blow it away.
                self.remove()
                self.get(state, overwrite=False)
                return
            raise error.Abort((status or err).splitlines()[-1])
        self.ui.status(status)

    @annotatesubrepoerror
    def merge(self, state):
        old = self._state[1]
        new = state[1]
        wcrev = self._wcrev()
        if new != wcrev:
            dirty = old == wcrev or self._wcchanged()[0]
            if _updateprompt(self.ui, self, dirty, wcrev, new):
                self.get(state, False)

    def push(self, opts):
        # push is a no-op for SVN
        return True

    @annotatesubrepoerror
    def files(self):
        output = self._svncommand([b'list', b'--recursive', b'--xml'])[0]
        doc = xml.dom.minidom.parseString(output)
        paths = []
        for e in doc.getElementsByTagName('entry'):
            kind = pycompat.bytestr(e.getAttribute('kind'))
            if kind != b'file':
                continue
            name = ''.join(
                c.data
                for c in e.getElementsByTagName('name')[0].childNodes
                if c.nodeType == c.TEXT_NODE
            )
            paths.append(name.encode('utf8'))
        return paths

    def filedata(self, name, decode):
        return self._svncommand([b'cat'], name)[0]


class gitsubrepo(abstractsubrepo):
    def __init__(self, ctx, path, state, allowcreate):
        super(gitsubrepo, self).__init__(ctx, path)
        self._state = state
        self._abspath = ctx.repo().wjoin(path)
        self._subparent = ctx.repo()
        self._ensuregit()

    def _ensuregit(self):
        try:
            self._gitexecutable = b'git'
            out, err = self._gitnodir([b'--version'])
        except OSError as e:
            genericerror = _(b"error executing git for subrepo '%s': %s")
            notfoundhint = _(b"check git is installed and in your PATH")
            if e.errno != errno.ENOENT:
                raise error.Abort(
                    genericerror % (self._path, encoding.strtolocal(e.strerror))
                )
            elif pycompat.iswindows:
                try:
                    self._gitexecutable = b'git.cmd'
                    out, err = self._gitnodir([b'--version'])
                except OSError as e2:
                    if e2.errno == errno.ENOENT:
                        raise error.Abort(
                            _(
                                b"couldn't find 'git' or 'git.cmd'"
                                b" for subrepo '%s'"
                            )
                            % self._path,
                            hint=notfoundhint,
                        )
                    else:
                        raise error.Abort(
                            genericerror
                            % (self._path, encoding.strtolocal(e2.strerror))
                        )
            else:
                raise error.Abort(
                    _(b"couldn't find git for subrepo '%s'") % self._path,
                    hint=notfoundhint,
                )
        versionstatus = self._checkversion(out)
        if versionstatus == b'unknown':
            self.ui.warn(_(b'cannot retrieve git version\n'))
        elif versionstatus == b'abort':
            raise error.Abort(
                _(b'git subrepo requires at least 1.6.0 or later')
            )
        elif versionstatus == b'warning':
            self.ui.warn(_(b'git subrepo requires at least 1.6.0 or later\n'))

    @staticmethod
    def _gitversion(out):
        m = re.search(br'^git version (\d+)\.(\d+)\.(\d+)', out)
        if m:
            return (int(m.group(1)), int(m.group(2)), int(m.group(3)))

        m = re.search(br'^git version (\d+)\.(\d+)', out)
        if m:
            return (int(m.group(1)), int(m.group(2)), 0)

        return -1

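    # _gitversion() parsing, sketched with hypothetical version banners:
    #
    #   b'git version 2.24.1'   -> (2, 24, 1)
    #   b'git version 1.9-rc0'  -> (1, 9, 0)   (two-component fallback)
    #   b'not a banner'         -> -1          (callers must test for -1
    #                                           before any tuple comparison)
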
    @staticmethod
    def _checkversion(out):
        '''ensure git version is new enough

        >>> _checkversion = gitsubrepo._checkversion
        >>> _checkversion(b'git version 1.6.0')
        'ok'
        >>> _checkversion(b'git version 1.8.5')
        'ok'
        >>> _checkversion(b'git version 1.4.0')
        'abort'
        >>> _checkversion(b'git version 1.5.0')
        'warning'
        >>> _checkversion(b'git version 1.9-rc0')
        'ok'
        >>> _checkversion(b'git version 1.9.0.265.g81cdec2')
        'ok'
        >>> _checkversion(b'git version 1.9.0.GIT')
        'ok'
        >>> _checkversion(b'git version 12345')
        'unknown'
        >>> _checkversion(b'no')
        'unknown'
        '''
        version = gitsubrepo._gitversion(out)
        # git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
        # despite the docstring comment. For now, error on 1.4.0, warn on
        # 1.5.0 but attempt to continue.
        if version == -1:
            return b'unknown'
        if version < (1, 5, 0):
            return b'abort'
        elif version < (1, 6, 0):
            return b'warning'
        return b'ok'

    def _gitcommand(self, commands, env=None, stream=False):
        return self._gitdir(commands, env=env, stream=stream)[0]

    def _gitdir(self, commands, env=None, stream=False):
        return self._gitnodir(
            commands, env=env, stream=stream, cwd=self._abspath
        )

    def _gitnodir(self, commands, env=None, stream=False, cwd=None):
        """Calls the git command

        The method tries to call the git command. Versions prior to 1.6.0
        are not supported and will very probably fail.
        """
        self.ui.debug(b'%s: git %s\n' % (self._relpath, b' '.join(commands)))
        if env is None:
            env = encoding.environ.copy()
        # disable localization for Git output (issue5176)
        env[b'LC_ALL'] = b'C'
        # fix for Git CVE-2015-7545
        if b'GIT_ALLOW_PROTOCOL' not in env:
            env[b'GIT_ALLOW_PROTOCOL'] = b'file:git:http:https:ssh'
        # unless ui.quiet is set, print git's stderr,
        # which is mostly progress and useful info
        errpipe = None
        if self.ui.quiet:
            errpipe = pycompat.open(os.devnull, b'w')
        if self.ui._colormode and len(commands) and commands[0] == b"diff":
            # insert the argument in the front,
            # the end of git diff arguments is used for paths
            commands.insert(1, b'--color')
        p = subprocess.Popen(
            pycompat.rapply(
                procutil.tonativestr, [self._gitexecutable] + commands
            ),
            bufsize=-1,
            cwd=pycompat.rapply(procutil.tonativestr, cwd),
            env=procutil.tonativeenv(env),
            close_fds=procutil.closefds,
            stdout=subprocess.PIPE,
            stderr=errpipe,
        )
        if stream:
            return p.stdout, None

        retdata = p.stdout.read().strip()
        # wait for the child to exit to avoid race condition.
        p.wait()

        if p.returncode != 0 and p.returncode != 1:
            # there are certain error codes that are ok
            command = commands[0]
            if command in (b'cat-file', b'symbolic-ref'):
                return retdata, p.returncode
            # for all others, abort
            raise error.Abort(
                _(b'git %s error %d in %s')
                % (command, p.returncode, self._relpath)
            )

        return retdata, p.returncode

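    # A sketch of the permissive error handling above: probing for an object
    # (sha is a hypothetical commit hash) relies on 'cat-file' being allowed
    # to return 1,
    #
    #   out, code = self._gitdir([b'cat-file', b'-e', sha])
    #   # code == 0 iff the object exists locally
    #
    # which is how _githavelocally() below is implemented.
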
    def _gitmissing(self):
        return not self.wvfs.exists(b'.git')

    def _gitstate(self):
        return self._gitcommand([b'rev-parse', b'HEAD'])

    def _gitcurrentbranch(self):
        current, err = self._gitdir([b'symbolic-ref', b'HEAD', b'--quiet'])
        if err:
            current = None
        return current

    def _gitremote(self, remote):
        out = self._gitcommand([b'remote', b'show', b'-n', remote])
        line = out.split(b'\n')[1]
        i = line.index(b'URL: ') + len(b'URL: ')
        return line[i:]

    def _githavelocally(self, revision):
        out, code = self._gitdir([b'cat-file', b'-e', revision])
        return code == 0

    def _gitisancestor(self, r1, r2):
        base = self._gitcommand([b'merge-base', r1, r2])
        return base == r1

    def _gitisbare(self):
        return self._gitcommand([b'config', b'--bool', b'core.bare']) == b'true'

    def _gitupdatestat(self):
        """This must be run before git diff-index.
        diff-index only looks at changes to file stat;
        this command looks at file contents and updates the stat."""
        self._gitcommand([b'update-index', b'-q', b'--refresh'])

    def _gitbranchmap(self):
        '''returns 2 things:
        a map from git branch to revision
        a map from revision to branches'''
        branch2rev = {}
        rev2branch = {}

        out = self._gitcommand(
            [b'for-each-ref', b'--format', b'%(objectname) %(refname)']
        )
        for line in out.split(b'\n'):
            revision, ref = line.split(b' ')
            if not ref.startswith(b'refs/heads/') and not ref.startswith(
                b'refs/remotes/'
            ):
                continue
            if ref.startswith(b'refs/remotes/') and ref.endswith(b'/HEAD'):
                continue  # ignore remote/HEAD redirects
            branch2rev[ref] = revision
            rev2branch.setdefault(revision, []).append(ref)
        return branch2rev, rev2branch

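    # Hypothetical 'for-each-ref' output and the maps built from it:
    #
    #   deadbeef... refs/heads/master
    #   deadbeef... refs/remotes/origin/master
    #
    # -> branch2rev maps both refs to the revision, and rev2branch maps the
    #    revision back to [b'refs/heads/master', b'refs/remotes/origin/master'];
    #    refs/remotes/*/HEAD and non-branch refs (e.g. tags) are skipped.
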
    def _gittracking(self, branches):
        """return map of remote branch to local tracking branch"""
        # assumes no more than one local tracking branch for each remote
        tracking = {}
        for b in branches:
            if b.startswith(b'refs/remotes/'):
                continue
            bname = b.split(b'/', 2)[2]
            remote = self._gitcommand([b'config', b'branch.%s.remote' % bname])
            if remote:
                ref = self._gitcommand([b'config', b'branch.%s.merge' % bname])
                tracking[
                    b'refs/remotes/%s/%s' % (remote, ref.split(b'/', 2)[2])
                ] = b
        return tracking

    def _abssource(self, source):
        if b'://' not in source:
            # recognize the scp syntax as an absolute source
            colon = source.find(b':')
            if colon != -1 and b'/' not in source[:colon]:
                return source
        self._subsource = source
        return _abssource(self)

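    # The scp-style test above treats e.g. b'git@example.com:sub.git' as
    # already absolute (the first colon precedes any slash), while a plain
    # relative path such as b'../sub' falls through to _abssource().
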
    def _fetch(self, source, revision):
        if self._gitmissing():
            # SEC: check for safe ssh url
            util.checksafessh(source)

            source = self._abssource(source)
            self.ui.status(
                _(b'cloning subrepo %s from %s\n') % (self._relpath, source)
            )
            self._gitnodir([b'clone', source, self._abspath])
        if self._githavelocally(revision):
            return
        self.ui.status(
            _(b'pulling subrepo %s from %s\n')
            % (self._relpath, self._gitremote(b'origin'))
        )
        # try only origin: the originally cloned repo
        self._gitcommand([b'fetch'])
        if not self._githavelocally(revision):
            raise error.Abort(
                _(b'revision %s does not exist in subrepository "%s"\n')
                % (revision, self._relpath)
            )

    @annotatesubrepoerror
    def dirty(self, ignoreupdate=False, missing=False):
        if self._gitmissing():
            return self._state[1] != b''
        if self._gitisbare():
            return True
        if not ignoreupdate and self._state[1] != self._gitstate():
            # different version checked out
            return True
        # check for staged changes or modified files; ignore untracked files
        self._gitupdatestat()
        out, code = self._gitdir([b'diff-index', b'--quiet', b'HEAD'])
        return code == 1

    def basestate(self):
        return self._gitstate()

    @annotatesubrepoerror
    def get(self, state, overwrite=False):
        source, revision, kind = state
        if not revision:
            self.remove()
            return
        self._fetch(source, revision)
        # if the repo was set to be bare, unbare it
        if self._gitisbare():
            self._gitcommand([b'config', b'core.bare', b'false'])
            if self._gitstate() == revision:
                self._gitcommand([b'reset', b'--hard', b'HEAD'])
                return
        elif self._gitstate() == revision:
            if overwrite:
                # first reset the index to unmark new files for commit, because
                # reset --hard will otherwise throw away files added for commit,
                # not just unmark them.
                self._gitcommand([b'reset', b'HEAD'])
                self._gitcommand([b'reset', b'--hard', b'HEAD'])
            return
        branch2rev, rev2branch = self._gitbranchmap()

        def checkout(args):
            cmd = [b'checkout']
            if overwrite:
                # first reset the index to unmark new files for commit, because
                # the -f option will otherwise throw away files added for
                # commit, not just unmark them.
                self._gitcommand([b'reset', b'HEAD'])
                cmd.append(b'-f')
            self._gitcommand(cmd + args)
            _sanitize(self.ui, self.wvfs, b'.git')

        def rawcheckout():
            # no branch to checkout, check it out with no branch
            self.ui.warn(
                _(b'checking out detached HEAD in subrepository "%s"\n')
                % self._relpath
            )
            self.ui.warn(
                _(b'check out a git branch if you intend to make changes\n')
            )
            checkout([b'-q', revision])

        if revision not in rev2branch:
            rawcheckout()
            return
        branches = rev2branch[revision]
        firstlocalbranch = None
        for b in branches:
            if b == b'refs/heads/master':
                # master trumps all other branches
                checkout([b'refs/heads/master'])
                return
            if not firstlocalbranch and not b.startswith(b'refs/remotes/'):
                firstlocalbranch = b
        if firstlocalbranch:
            checkout([firstlocalbranch])
            return

        tracking = self._gittracking(branch2rev.keys())
        # choose a remote branch already tracked if possible
        remote = branches[0]
        if remote not in tracking:
            for b in branches:
                if b in tracking:
                    remote = b
                    break

        if remote not in tracking:
            # create a new local tracking branch
            local = remote.split(b'/', 3)[3]
            checkout([b'-b', local, remote])
        elif self._gitisancestor(branch2rev[tracking[remote]], remote):
            # When updating to a tracked remote branch,
            # if the local tracking branch is downstream of it,
            # a normal `git pull` would have performed a "fast-forward merge"
            # which is equivalent to updating the local branch to the remote.
            # Since we are only looking at branching at update, we need to
            # detect this situation and perform this action lazily.
            if tracking[remote] != self._gitcurrentbranch():
                checkout([tracking[remote]])
            self._gitcommand([b'merge', b'--ff', remote])
            _sanitize(self.ui, self.wvfs, b'.git')
        else:
            # a real merge would be required, just checkout the revision
            rawcheckout()

    @annotatesubrepoerror
    def commit(self, text, user, date):
        if self._gitmissing():
            raise error.Abort(_(b"subrepo %s is missing") % self._relpath)
        cmd = [b'commit', b'-a', b'-m', text]
        env = encoding.environ.copy()
        if user:
            cmd += [b'--author', user]
        if date:
            # git's date parser silently ignores when seconds < 1e9
            # convert to ISO8601
            env[b'GIT_AUTHOR_DATE'] = dateutil.datestr(
                date, b'%Y-%m-%dT%H:%M:%S %1%2'
            )
        self._gitcommand(cmd, env=env)
        # make sure commit works otherwise HEAD might not exist under certain
        # circumstances
        return self._gitstate()

    @annotatesubrepoerror
    def merge(self, state):
        source, revision, kind = state
        self._fetch(source, revision)
        base = self._gitcommand([b'merge-base', revision, self._state[1]])
        self._gitupdatestat()
        out, code = self._gitdir([b'diff-index', b'--quiet', b'HEAD'])

        def mergefunc():
            if base == revision:
                self.get(state)  # fast forward merge
            elif base != self._state[1]:
                self._gitcommand([b'merge', b'--no-commit', revision])
            _sanitize(self.ui, self.wvfs, b'.git')

        if self.dirty():
            if self._gitstate() != revision:
                dirty = self._gitstate() == self._state[1] or code != 0
                if _updateprompt(
                    self.ui, self, dirty, self._state[1][:7], revision[:7]
                ):
                    mergefunc()
        else:
            mergefunc()

    @annotatesubrepoerror
    def push(self, opts):
        force = opts.get(b'force')

        if not self._state[1]:
            return True
        if self._gitmissing():
            raise error.Abort(_(b"subrepo %s is missing") % self._relpath)
        # if a branch in origin contains the revision, nothing to do
        branch2rev, rev2branch = self._gitbranchmap()
        if self._state[1] in rev2branch:
            for b in rev2branch[self._state[1]]:
                if b.startswith(b'refs/remotes/origin/'):
                    return True
        for b, revision in pycompat.iteritems(branch2rev):
            if b.startswith(b'refs/remotes/origin/'):
                if self._gitisancestor(self._state[1], revision):
                    return True
        # otherwise, try to push the currently checked out branch
        cmd = [b'push']
        if force:
            cmd.append(b'--force')

        current = self._gitcurrentbranch()
        if current:
            # determine if the current branch is even useful
            if not self._gitisancestor(self._state[1], current):
                self.ui.warn(
                    _(
                        b'unrelated git branch checked out '
                        b'in subrepository "%s"\n'
                    )
                    % self._relpath
                )
                return False
            self.ui.status(
                _(b'pushing branch %s of subrepository "%s"\n')
                % (current.split(b'/', 2)[2], self._relpath)
            )
            ret = self._gitdir(cmd + [b'origin', current])
            return ret[1] == 0
        else:
            self.ui.warn(
                _(
                    b'no branch checked out in subrepository "%s"\n'
                    b'cannot push revision %s\n'
                )
                % (self._relpath, self._state[1])
            )
            return False

    @annotatesubrepoerror
    def add(self, ui, match, prefix, uipathfn, explicitonly, **opts):
        if self._gitmissing():
            return []

        s = self.status(None, unknown=True, clean=True)

        tracked = set()
        # dirstates 'amn' warn, 'r' is added again
        for l in (s.modified, s.added, s.deleted, s.clean):
            tracked.update(l)

        # Unknown files not of interest will be rejected by the matcher
        files = s.unknown
        files.extend(match.files())

        rejected = []

        files = [f for f in sorted(set(files)) if match(f)]
        for f in files:
            exact = match.exact(f)
            command = [b"add"]
            if exact:
                command.append(b"-f")  # should be added, even if ignored
            if ui.verbose or not exact:
                ui.status(_(b'adding %s\n') % uipathfn(f))

            if f in tracked:  # hg prints 'adding' even if already tracked
                if exact:
                    rejected.append(f)
                continue
            if not opts.get('dry_run'):
                self._gitcommand(command + [f])

        for f in rejected:
            ui.warn(_(b"%s already tracked!\n") % uipathfn(f))

        return rejected

    @annotatesubrepoerror
    def remove(self):
        if self._gitmissing():
            return
        if self.dirty():
            self.ui.warn(
                _(b'not removing repo %s because it has changes.\n')
                % self._relpath
            )
            return
        # we can't fully delete the repository as it may contain
        # local-only history
        self.ui.note(_(b'removing subrepo %s\n') % self._relpath)
        self._gitcommand([b'config', b'core.bare', b'true'])
        for f, kind in self.wvfs.readdir():
            if f == b'.git':
                continue
            if kind == stat.S_IFDIR:
                self.wvfs.rmtree(f)
            else:
                self.wvfs.unlink(f)

    def archive(self, archiver, prefix, match=None, decode=True):
        total = 0
        source, revision = self._state
        if not revision:
            return total
        self._fetch(source, revision)

        # Parse git's native archive command.
        # This should be much faster than manually traversing the trees
        # and objects with many subprocess calls.
        tarstream = self._gitcommand([b'archive', revision], stream=True)
        tar = tarfile.open(fileobj=tarstream, mode='r|')
        relpath = subrelpath(self)
        progress = self.ui.makeprogress(
            _(b'archiving (%s)') % relpath, unit=_(b'files')
        )
        progress.update(0)
        for info in tar:
            if info.isdir():
                continue
            bname = pycompat.fsencode(info.name)
            if match and not match(bname):
                continue
            if info.issym():
                data = info.linkname
            else:
                data = tar.extractfile(info).read()
            archiver.addfile(prefix + bname, info.mode, info.issym(), data)
            total += 1
            progress.increment()
        progress.complete()
        return total

    @annotatesubrepoerror
    def cat(self, match, fm, fntemplate, prefix, **opts):
        rev = self._state[1]
        if match.anypats():
            return 1  # No support for include/exclude yet

        if not match.files():
            return 1

        # TODO: add support for non-plain formatter (see cmdutil.cat())
        for f in match.files():
            output = self._gitcommand([b"show", b"%s:%s" % (rev, f)])
            fp = cmdutil.makefileobj(
                self._ctx, fntemplate, pathname=self.wvfs.reljoin(prefix, f)
            )
            fp.write(output)
            fp.close()
        return 0

    @annotatesubrepoerror
    def status(self, rev2, **opts):
        rev1 = self._state[1]
        if self._gitmissing() or not rev1:
            # if the repo is missing, return no results
            return scmutil.status([], [], [], [], [], [], [])
        modified, added, removed = [], [], []
        self._gitupdatestat()
        if rev2:
            command = [b'diff-tree', b'--no-renames', b'-r', rev1, rev2]
        else:
            command = [b'diff-index', b'--no-renames', rev1]
        out = self._gitcommand(command)
        for line in out.split(b'\n'):
            tab = line.find(b'\t')
            if tab == -1:
                continue
            status, f = line[tab - 1 : tab], line[tab + 1 :]
            if status == b'M':
                modified.append(f)
            elif status == b'A':
                added.append(f)
            elif status == b'D':
                removed.append(f)

        deleted, unknown, ignored, clean = [], [], [], []

        command = [b'status', b'--porcelain', b'-z']
        if opts.get('unknown'):
            command += [b'--untracked-files=all']
        if opts.get('ignored'):
            command += [b'--ignored']
        out = self._gitcommand(command)

1937 changedfiles = set()
1937 changedfiles = set()
1938 changedfiles.update(modified)
1938 changedfiles.update(modified)
1939 changedfiles.update(added)
1939 changedfiles.update(added)
1940 changedfiles.update(removed)
1940 changedfiles.update(removed)
1941 for line in out.split(b'\0'):
1941 for line in out.split(b'\0'):
1942 if not line:
1942 if not line:
1943 continue
1943 continue
1944 st = line[0:2]
1944 st = line[0:2]
1945 # moves and copies show 2 files on one line
1945 # moves and copies show 2 files on one line
1946 if line.find(b'\0') >= 0:
1946 if line.find(b'\0') >= 0:
1947 filename1, filename2 = line[3:].split(b'\0')
1947 filename1, filename2 = line[3:].split(b'\0')
1948 else:
1948 else:
1949 filename1 = line[3:]
1949 filename1 = line[3:]
1950 filename2 = None
1950 filename2 = None
1951
1951
1952 changedfiles.add(filename1)
1952 changedfiles.add(filename1)
1953 if filename2:
1953 if filename2:
1954 changedfiles.add(filename2)
1954 changedfiles.add(filename2)
1955
1955
1956 if st == b'??':
1956 if st == b'??':
1957 unknown.append(filename1)
1957 unknown.append(filename1)
1958 elif st == b'!!':
1958 elif st == b'!!':
1959 ignored.append(filename1)
1959 ignored.append(filename1)
1960
1960
1961 if opts.get('clean'):
1961 if opts.get('clean'):
1962 out = self._gitcommand([b'ls-files'])
1962 out = self._gitcommand([b'ls-files'])
1963 for f in out.split(b'\n'):
1963 for f in out.split(b'\n'):
1964 if not f in changedfiles:
1964 if not f in changedfiles:
1965 clean.append(f)
1965 clean.append(f)
1966
1966
1967 return scmutil.status(
1967 return scmutil.status(
1968 modified, added, removed, deleted, unknown, ignored, clean
1968 modified, added, removed, deleted, unknown, ignored, clean
1969 )
1969 )
1970
1970
1971 @annotatesubrepoerror
1971 @annotatesubrepoerror
1972 def diff(self, ui, diffopts, node2, match, prefix, **opts):
1972 def diff(self, ui, diffopts, node2, match, prefix, **opts):
1973 node1 = self._state[1]
1973 node1 = self._state[1]
1974 cmd = [b'diff', b'--no-renames']
1974 cmd = [b'diff', b'--no-renames']
1975 if opts['stat']:
1975 if opts['stat']:
1976 cmd.append(b'--stat')
1976 cmd.append(b'--stat')
1977 else:
1977 else:
1978 # for Git, this also implies '-p'
1978 # for Git, this also implies '-p'
1979 cmd.append(b'-U%d' % diffopts.context)
1979 cmd.append(b'-U%d' % diffopts.context)
1980
1980
1981 if diffopts.noprefix:
1981 if diffopts.noprefix:
1982 cmd.extend(
1982 cmd.extend(
1983 [b'--src-prefix=%s/' % prefix, b'--dst-prefix=%s/' % prefix]
1983 [b'--src-prefix=%s/' % prefix, b'--dst-prefix=%s/' % prefix]
1984 )
1984 )
1985 else:
1985 else:
1986 cmd.extend(
1986 cmd.extend(
1987 [b'--src-prefix=a/%s/' % prefix, b'--dst-prefix=b/%s/' % prefix]
1987 [b'--src-prefix=a/%s/' % prefix, b'--dst-prefix=b/%s/' % prefix]
1988 )
1988 )
1989
1989
1990 if diffopts.ignorews:
1990 if diffopts.ignorews:
1991 cmd.append(b'--ignore-all-space')
1991 cmd.append(b'--ignore-all-space')
1992 if diffopts.ignorewsamount:
1992 if diffopts.ignorewsamount:
1993 cmd.append(b'--ignore-space-change')
1993 cmd.append(b'--ignore-space-change')
1994 if (
1994 if (
1995 self._gitversion(self._gitcommand([b'--version'])) >= (1, 8, 4)
1995 self._gitversion(self._gitcommand([b'--version'])) >= (1, 8, 4)
1996 and diffopts.ignoreblanklines
1996 and diffopts.ignoreblanklines
1997 ):
1997 ):
1998 cmd.append(b'--ignore-blank-lines')
1998 cmd.append(b'--ignore-blank-lines')
1999
1999
2000 cmd.append(node1)
2000 cmd.append(node1)
2001 if node2:
2001 if node2:
2002 cmd.append(node2)
2002 cmd.append(node2)
2003
2003
2004 output = b""
2004 output = b""
2005 if match.always():
2005 if match.always():
2006 output += self._gitcommand(cmd) + b'\n'
2006 output += self._gitcommand(cmd) + b'\n'
2007 else:
2007 else:
2008 st = self.status(node2)
2008 st = self.status(node2)
2009 files = [
2009 files = [
2010 f
2010 f
2011 for sublist in (st.modified, st.added, st.removed)
2011 for sublist in (st.modified, st.added, st.removed)
2012 for f in sublist
2012 for f in sublist
2013 ]
2013 ]
2014 for f in files:
2014 for f in files:
2015 if match(f):
2015 if match(f):
2016 output += self._gitcommand(cmd + [b'--', f]) + b'\n'
2016 output += self._gitcommand(cmd + [b'--', f]) + b'\n'
2017
2017
2018 if output.strip():
2018 if output.strip():
2019 ui.write(output)
2019 ui.write(output)
2020
2020
2021 @annotatesubrepoerror
2021 @annotatesubrepoerror
2022 def revert(self, substate, *pats, **opts):
2022 def revert(self, substate, *pats, **opts):
2023 self.ui.status(_(b'reverting subrepo %s\n') % substate[0])
2023 self.ui.status(_(b'reverting subrepo %s\n') % substate[0])
2024 if not opts.get('no_backup'):
2024 if not opts.get('no_backup'):
2025 status = self.status(None)
2025 status = self.status(None)
2026 names = status.modified
2026 names = status.modified
2027 for name in names:
2027 for name in names:
2028 # backuppath() expects a path relative to the parent repo (the
2028 # backuppath() expects a path relative to the parent repo (the
2029 # repo that ui.origbackuppath is relative to)
2029 # repo that ui.origbackuppath is relative to)
2030 parentname = os.path.join(self._path, name)
2030 parentname = os.path.join(self._path, name)
2031 bakname = scmutil.backuppath(
2031 bakname = scmutil.backuppath(
2032 self.ui, self._subparent, parentname
2032 self.ui, self._subparent, parentname
2033 )
2033 )
2034 self.ui.note(
2034 self.ui.note(
2035 _(b'saving current version of %s as %s\n')
2035 _(b'saving current version of %s as %s\n')
2036 % (name, os.path.relpath(bakname))
2036 % (name, os.path.relpath(bakname))
2037 )
2037 )
2038 util.rename(self.wvfs.join(name), bakname)
2038 util.rename(self.wvfs.join(name), bakname)
2039
2039
2040 if not opts.get('dry_run'):
2040 if not opts.get('dry_run'):
2041 self.get(substate, overwrite=True)
2041 self.get(substate, overwrite=True)
2042 return []
2042 return []
2043
2043
2044 def shortid(self, revid):
2044 def shortid(self, revid):
2045 return revid[:7]
2045 return revid[:7]
2046
2046
2047
2047
2048 types = {
2048 types = {
2049 b'hg': hgsubrepo,
2049 b'hg': hgsubrepo,
2050 b'svn': svnsubrepo,
2050 b'svn': svnsubrepo,
2051 b'git': gitsubrepo,
2051 b'git': gitsubrepo,
2052 }
2052 }
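
The status() method above shells out to 'git status --porcelain -z' and walks
the NUL-separated output by hand. As a standalone illustration, here is a
minimal sketch of that parsing using plain subprocess; the helper name and the
rename handling are illustrative, not Mercurial's actual code (the subrepo
class routes everything through its _gitcommand wrapper instead):

    import subprocess

    def git_porcelain_status(cwd):
        """Yield (status, path) pairs from `git status --porcelain -z`.

        Entries are NUL-separated; for renames/copies the original name
        follows as an extra NUL-terminated field, which we consume here.
        """
        out = subprocess.run(
            ['git', 'status', '--porcelain', '-z'],
            cwd=cwd, capture_output=True, check=True,
        ).stdout
        fields = iter(out.split(b'\0'))
        for field in fields:
            if not field:
                continue
            st, path = field[0:2], field[3:]
            if st.startswith((b'R', b'C')):
                next(fields)  # skip the rename/copy source name
            yield st, path

In this encoding, '??' entries are untracked and '!!' entries are ignored,
which is exactly how the method above buckets them.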
@@ -1,1697 +1,1693 b''
A script that implements uppercasing of specific lines in a file. This
approximates the behavior of code formatters well enough for our tests.

  $ UPPERCASEPY="$TESTTMP/uppercase.py"
  $ cat > $UPPERCASEPY <<EOF
  > import sys
  > from mercurial.utils.procutil import setbinary
  > setbinary(sys.stdin)
  > setbinary(sys.stdout)
  > lines = set()
  > for arg in sys.argv[1:]:
  >     if arg == 'all':
  >         sys.stdout.write(sys.stdin.read().upper())
  >         sys.exit(0)
  >     else:
  >         first, last = arg.split('-')
  >         lines.update(range(int(first), int(last) + 1))
  > for i, line in enumerate(sys.stdin.readlines()):
  >     if i + 1 in lines:
  >         sys.stdout.write(line.upper())
  >     else:
  >         sys.stdout.write(line)
  > EOF
  $ TESTLINES="foo\nbar\nbaz\nqux\n"
  $ printf $TESTLINES | "$PYTHON" $UPPERCASEPY
  foo
  bar
  baz
  qux
  $ printf $TESTLINES | "$PYTHON" $UPPERCASEPY all
  FOO
  BAR
  BAZ
  QUX
  $ printf $TESTLINES | "$PYTHON" $UPPERCASEPY 1-1
  FOO
  bar
  baz
  qux
  $ printf $TESTLINES | "$PYTHON" $UPPERCASEPY 1-2
  FOO
  BAR
  baz
  qux
  $ printf $TESTLINES | "$PYTHON" $UPPERCASEPY 2-3
  foo
  BAR
  BAZ
  qux
  $ printf $TESTLINES | "$PYTHON" $UPPERCASEPY 2-2 4-4
  foo
  BAR
  baz
  QUX

Set up the config with two simple fixers: one that fixes specific line ranges,
and one that always fixes the whole file. They both "fix" files by converting
letters to uppercase. They use different file extensions, so each test case can
choose which behavior to use by naming files.

  $ cat >> $HGRCPATH <<EOF
  > [extensions]
  > fix =
  > [experimental]
  > evolution.createmarkers=True
  > evolution.allowunstable=True
  > [fix]
  > uppercase-whole-file:command="$PYTHON" $UPPERCASEPY all
  > uppercase-whole-file:pattern=set:**.whole
  > uppercase-changed-lines:command="$PYTHON" $UPPERCASEPY
  > uppercase-changed-lines:linerange={first}-{last}
  > uppercase-changed-lines:pattern=set:**.changed
  > EOF

Help text for fix.

  $ hg help fix
  hg fix [OPTION]... [FILE]...

  rewrite file content in changesets or working directory

      Runs any configured tools to fix the content of files. Only affects files
      with changes, unless file arguments are provided. Only affects changed
      lines of files, unless the --whole flag is used. Some tools may always
      affect the whole file regardless of --whole.

      If revisions are specified with --rev, those revisions will be checked,
      and they may be replaced with new revisions that have fixed file content.
      It is desirable to specify all descendants of each specified revision, so
      that the fixes propagate to the descendants. If all descendants are fixed
      at the same time, no merging, rebasing, or evolution will be required.

      If --working-dir is used, files with uncommitted changes in the working
      copy will be fixed. If the checked-out revision is also fixed, the working
      directory will update to the replacement revision.

      When determining what lines of each file to fix at each revision, the
      whole set of revisions being fixed is considered, so that fixes to earlier
      revisions are not forgotten in later ones. The --base flag can be used to
      override this default behavior, though it is not usually desirable to do
      so.

      (use 'hg help -e fix' to show help for the fix extension)

  options ([+] can be repeated):

      --all            fix all non-public non-obsolete revisions
      --base REV [+]   revisions to diff against (overrides automatic selection,
                       and applies to every revision being fixed)
   -r --rev REV [+]    revisions to fix
   -w --working-dir    fix the working directory
      --whole          always fix every line of a file

  (some details hidden, use --verbose to show complete help)

  $ hg help -e fix
  fix extension - rewrite file content in changesets or working copy
  (EXPERIMENTAL)

  Provides a command that runs configured tools on the contents of modified
  files, writing back any fixes to the working copy or replacing changesets.

  Here is an example configuration that causes 'hg fix' to apply automatic
  formatting fixes to modified lines in C++ code:

    [fix]
    clang-format:command=clang-format --assume-filename={rootpath}
    clang-format:linerange=--lines={first}:{last}
    clang-format:pattern=set:**.cpp or **.hpp

  The :command suboption forms the first part of the shell command that will be
  used to fix a file. The content of the file is passed on standard input, and
  the fixed file content is expected on standard output. Any output on standard
  error will be displayed as a warning. If the exit status is not zero, the file
  will not be affected. A placeholder warning is displayed if there is a non-
  zero exit status but no standard error output. Some values may be substituted
  into the command:

    {rootpath}  The path of the file being fixed, relative to the repo root
    {basename}  The name of the file being fixed, without the directory path

  If the :linerange suboption is set, the tool will only be run if there are
  changed lines in a file. The value of this suboption is appended to the shell
  command once for every range of changed lines in the file. Some values may be
  substituted into the command:

    {first}  The 1-based line number of the first line in the modified range
    {last}   The 1-based line number of the last line in the modified range

  Deleted sections of a file will be ignored by :linerange, because there is no
  corresponding line range in the version being fixed.

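To make the substitution concrete, here is a minimal sketch of expanding a
:linerange template into per-range arguments; the helper name and the
list-of-ranges input are illustrative, not the extension's internals:

    def expand_linerange(template, ranges):
        """Append one formatted argument per changed range.

        `template` is a value like '--lines={first}:{last}' and `ranges`
        is a list of 1-based (first, last) pairs.
        """
        return [template.format(first=f, last=l) for f, l in ranges]

    # expand_linerange('--lines={first}:{last}', [(1, 1), (4, 6)])
    # -> ['--lines=1:1', '--lines=4:6']
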
  By default, tools that set :linerange will only be executed if there is at
  least one changed line range. This is meant to prevent accidents like running
  a code formatter in such a way that it unexpectedly reformats the whole file.
  If such a tool needs to operate on unchanged files, it should set the
  :skipclean suboption to false.

  The :pattern suboption determines which files will be passed through each
  configured tool. See 'hg help patterns' for possible values. However, all
  patterns are relative to the repo root, even if that text says they are
  relative to the current working directory. If there are file arguments to 'hg
  fix', the intersection of these patterns is used.

  There is also a configurable limit for the maximum size of file that will be
  processed by 'hg fix':

    [fix]
    maxfilesize = 2MB

  Normally, execution of configured tools will continue after a failure
  (indicated by a non-zero exit status). It can also be configured to abort
  after the first such failure, so that no files will be affected if any tool
  fails. This abort will also cause 'hg fix' to exit with a non-zero status:

    [fix]
    failure = abort

  When multiple tools are configured to affect a file, they execute in an order
  defined by the :priority suboption. The priority suboption has a default value
  of zero for each tool. Tools are executed in order of descending priority. The
  execution order of tools with equal priority is unspecified. For example, you
  could use the 'sort' and 'head' utilities to keep only the 10 smallest numbers
  in a text file by ensuring that 'sort' runs before 'head':

    [fix]
    sort:command = sort -n
    head:command = head -n 10
    sort:pattern = numbers.txt
    head:pattern = numbers.txt
    sort:priority = 2
    head:priority = 1

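The ordering rule is easy to state in code. A minimal sketch, assuming tools
are (name, priority) pairs; the tie-breaking shown here (by name) is one
arbitrary choice, since the help text leaves equal-priority order unspecified:

    tools = [('head', 1), ('sort', 2), ('lint', 0)]

    # Descending priority; name as an arbitrary, stable tie-breaker.
    execution_order = sorted(tools, key=lambda t: (-t[1], t[0]))
    # -> [('sort', 2), ('head', 1), ('lint', 0)]
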
  To account for changes made by each tool, the line numbers used for
  incremental formatting are recomputed before executing the next tool. So, each
  tool may see different values for the arguments added by the :linerange
  suboption.

  Each fixer tool is allowed to return some metadata in addition to the fixed
  file content. The metadata must be placed before the file content on stdout,
  separated from the file content by a zero byte. The metadata is parsed as a
  JSON value (so, it should be UTF-8 encoded and contain no zero bytes). A fixer
  tool is expected to produce this metadata encoding if and only if the
  :metadata suboption is true:

    [fix]
    tool:command = tool --prepend-json-metadata
    tool:metadata = true

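A minimal sketch of how a consumer might split such tool output, assuming the
whole stdout is already in memory as bytes:

    import json

    def split_metadata(stdout_bytes):
        """Split 'JSON\0content' tool output into (metadata, content).

        Returns (None, stdout_bytes) when no zero byte is present,
        mirroring the plain no-metadata protocol described above.
        """
        head, sep, rest = stdout_bytes.partition(b'\0')
        if not sep:
            return None, stdout_bytes
        return json.loads(head.decode('utf-8')), rest

    # split_metadata(b'{"checks": 3}\0FIXED CONTENT\n')
    # -> ({'checks': 3}, b'FIXED CONTENT\n')
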
  The metadata values are passed to hooks, which can be used to print summaries
  or perform other post-fixing work. The supported hooks are:

  "postfixfile"
    Run once for each file in each revision where any fixer tools made changes
    to the file content. Provides "$HG_REV" and "$HG_PATH" to identify the file,
    and "$HG_METADATA" with a map of fixer names to metadata values from fixer
    tools that affected the file. Fixer tools that didn't affect the file have a
    value of None. Only fixer tools that executed are present in the metadata.

  "postfix"
    Run once after all files and revisions have been handled. Provides
    "$HG_REPLACEMENTS" with information about what revisions were created and
    made obsolete. Provides a boolean "$HG_WDIRWRITTEN" to indicate whether any
    files in the working copy were updated. Provides a list "$HG_METADATA"
    mapping fixer tool names to lists of metadata values returned from
    executions that modified a file. This aggregates the same metadata
    previously passed to the "postfixfile" hook.

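As a concrete illustration, a shell hook wired to these events could be as
small as the following hgrc fragment; the echoed text is made up:

    [hooks]
    postfixfile = echo "fixed $HG_PATH in $HG_REV"
    postfix = echo "fix run complete"
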
  Fixer tools are run in the repository's root directory. This allows them to
  read configuration files from the working copy, or even write to the working
  copy. The working copy is not updated to match the revision being fixed. In
  fact, several revisions may be fixed in parallel. Writes to the working copy
  are not amended into the revision being fixed; fixer tools should always write
  fixed file content back to stdout as documented above.

  list of commands:

   fix           rewrite file content in changesets or working directory

  (use 'hg help -v -e fix' to show built-in aliases and global options)

There is no default behavior in the absence of --rev and --working-dir.

  $ hg init badusage
  $ cd badusage

  $ hg fix
  abort: no changesets specified
  (use --rev or --working-dir)
  [255]
  $ hg fix --whole
  abort: no changesets specified
  (use --rev or --working-dir)
  [255]
  $ hg fix --base 0
  abort: no changesets specified
  (use --rev or --working-dir)
  [255]

Fixing a public revision isn't allowed. It should abort early enough that
nothing happens, even to the working directory.

  $ printf "hello\n" > hello.whole
  $ hg commit -Aqm "hello"
  $ hg phase -r 0 --public
  $ hg fix -r 0
  abort: cannot fix public changesets
  (see 'hg help phases' for details)
  [255]
  $ hg fix -r 0 --working-dir
  abort: cannot fix public changesets
  (see 'hg help phases' for details)
  [255]
  $ hg cat -r tip hello.whole
  hello
  $ cat hello.whole
  hello

  $ cd ..

Fixing a clean working directory should do nothing. Even the --whole flag
shouldn't cause any clean files to be fixed. Specifying a clean file explicitly
should only fix it if the fixer always fixes the whole file. The combination of
an explicit filename and --whole should format the entire file regardless.

  $ hg init fixcleanwdir
  $ cd fixcleanwdir

  $ printf "hello\n" > hello.changed
  $ printf "world\n" > hello.whole
  $ hg commit -Aqm "foo"
  $ hg fix --working-dir
  $ hg diff
  $ hg fix --working-dir --whole
  $ hg diff
  $ hg fix --working-dir *
  $ cat *
  hello
  WORLD
  $ hg revert --all --no-backup
  reverting hello.whole
  $ hg fix --working-dir * --whole
  $ cat *
  HELLO
  WORLD

The same ideas apply to fixing a revision, so we create a revision that doesn't
modify either of the files in question and try fixing it. This also tests that
we ignore a file that doesn't match any configured fixer.

  $ hg revert --all --no-backup
  reverting hello.changed
  reverting hello.whole
  $ printf "unimportant\n" > some.file
  $ hg commit -Aqm "some other file"

  $ hg fix -r .
  $ hg cat -r tip *
  hello
  world
  unimportant
  $ hg fix -r . --whole
  $ hg cat -r tip *
  hello
  world
  unimportant
  $ hg fix -r . *
  $ hg cat -r tip *
  hello
  WORLD
  unimportant
  $ hg fix -r . * --whole --config experimental.evolution.allowdivergence=true
  2 new content-divergent changesets
  $ hg cat -r tip *
  HELLO
  WORLD
  unimportant

  $ cd ..

Fixing the working directory should still work if there are no revisions.

  $ hg init norevisions
  $ cd norevisions

  $ printf "something\n" > something.whole
  $ hg add
  adding something.whole
  $ hg fix --working-dir
  $ cat something.whole
  SOMETHING

  $ cd ..

Test the effect of fixing the working directory for each possible status, with
and without providing explicit file arguments.

  $ hg init implicitlyfixstatus
  $ cd implicitlyfixstatus

  $ printf "modified\n" > modified.whole
  $ printf "removed\n" > removed.whole
  $ printf "deleted\n" > deleted.whole
  $ printf "clean\n" > clean.whole
  $ printf "ignored.whole" > .hgignore
  $ hg commit -Aqm "stuff"

  $ printf "modified!!!\n" > modified.whole
  $ printf "unknown\n" > unknown.whole
  $ printf "ignored\n" > ignored.whole
  $ printf "added\n" > added.whole
  $ hg add added.whole
  $ hg remove removed.whole
  $ rm deleted.whole

  $ hg status --all
  M modified.whole
  A added.whole
  R removed.whole
  ! deleted.whole
  ? unknown.whole
  I ignored.whole
  C .hgignore
  C clean.whole

  $ hg fix --working-dir

  $ hg status --all
  M modified.whole
  A added.whole
  R removed.whole
  ! deleted.whole
  ? unknown.whole
  I ignored.whole
  C .hgignore
  C clean.whole

  $ cat *.whole
  ADDED
  clean
  ignored
  MODIFIED!!!
  unknown

  $ printf "modified!!!\n" > modified.whole
  $ printf "added\n" > added.whole

Listing the files explicitly causes untracked files to also be fixed, but
ignored files are still unaffected.

  $ hg fix --working-dir *.whole

  $ hg status --all
  M clean.whole
  M modified.whole
  A added.whole
  R removed.whole
  ! deleted.whole
  ? unknown.whole
  I ignored.whole
  C .hgignore

  $ cat *.whole
  ADDED
  CLEAN
  ignored
  MODIFIED!!!
  UNKNOWN

  $ cd ..

Test that incremental fixing works on files with additions, deletions, and
changes in multiple line ranges. Note that deletions do not generally cause
neighboring lines to be fixed, so we don't return a line range for purely
deleted sections. In the future we should support a :deletion config that
allows fixers to know where deletions are located. (One way to compute the
ranges is sketched after this test.)

  $ hg init incrementalfixedlines
  $ cd incrementalfixedlines

  $ printf "a\nb\nc\nd\ne\nf\ng\n" > foo.txt
  $ hg commit -Aqm "foo"
  $ printf "zz\na\nc\ndd\nee\nff\nf\ngg\n" > foo.txt

  $ hg --config "fix.fail:command=echo" \
  > --config "fix.fail:linerange={first}:{last}" \
  > --config "fix.fail:pattern=foo.txt" \
  > fix --working-dir
  $ cat foo.txt
  1:1 4:6 8:8

  $ cd ..

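The test above expects the ranges '1:1 4:6 8:8' for the edit shown. Here is a
minimal sketch of one way to derive such 1-based ranges with difflib; it is an
illustration of the idea, not the extension's actual implementation:

    import difflib

    def changed_ranges(old_lines, new_lines):
        """Return 1-based (first, last) ranges changed in new_lines.

        Pure deletions have an empty range on the new side and are
        skipped, matching the :linerange behavior described above.
        """
        matcher = difflib.SequenceMatcher(a=old_lines, b=new_lines)
        ranges = []
        for tag, i1, i2, j1, j2 in matcher.get_opcodes():
            if tag in ('replace', 'insert') and j2 > j1:
                ranges.append((j1 + 1, j2))
        return ranges

    old = ['a', 'b', 'c', 'd', 'e', 'f', 'g']
    new = ['zz', 'a', 'c', 'dd', 'ee', 'ff', 'f', 'gg']
    print(changed_ranges(old, new))  # [(1, 1), (4, 6), (8, 8)]
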
Test that --whole fixes all lines regardless of the diffs present.

  $ hg init wholeignoresdiffs
  $ cd wholeignoresdiffs

  $ printf "a\nb\nc\nd\ne\nf\ng\n" > foo.changed
  $ hg commit -Aqm "foo"
  $ printf "zz\na\nc\ndd\nee\nff\nf\ngg\n" > foo.changed

  $ hg fix --working-dir
  $ cat foo.changed
  ZZ
  a
  c
  DD
  EE
  FF
  f
  GG

  $ hg fix --working-dir --whole
  $ cat foo.changed
  ZZ
  A
  C
  DD
  EE
  FF
  F
  GG

  $ cd ..

We should do nothing with symlinks, and their targets should be unaffected. Any
other behavior would be more complicated to implement and harder to document.

#if symlink
  $ hg init dontmesswithsymlinks
  $ cd dontmesswithsymlinks

  $ printf "hello\n" > hello.whole
  $ ln -s hello.whole hellolink
  $ hg add
  adding hello.whole
  adding hellolink
  $ hg fix --working-dir hellolink
  $ hg status
  A hello.whole
  A hellolink

  $ cd ..
#endif

We should allow fixers to run on binary files, even though this doesn't sound
like a common use case. There's not much benefit to disallowing it, and users
can add "and not binary()" to their filesets if needed. The Mercurial
philosophy is generally to not handle binary files specially anyway.

  $ hg init cantouchbinaryfiles
  $ cd cantouchbinaryfiles

  $ printf "hello\0\n" > hello.whole
  $ hg add
  adding hello.whole
  $ hg fix --working-dir 'set:binary()'
  $ cat hello.whole
  HELLO\x00 (esc)

  $ cd ..

We have a config for the maximum size of file we will attempt to fix. This can
be helpful to avoid running unsuspecting fixer tools on huge inputs, which
could happen by accident without a well considered configuration. A more
precise configuration could use the size() fileset function if one global limit
is undesired.

  $ hg init maxfilesize
  $ cd maxfilesize

  $ printf "this file is huge\n" > hello.whole
  $ hg add
  adding hello.whole
  $ hg --config fix.maxfilesize=10 fix --working-dir
  ignoring file larger than 10 bytes: hello.whole
  $ cat hello.whole
  this file is huge

  $ cd ..

If we specify a file to fix, other files should be left alone, even if they
have changes.

  $ hg init fixonlywhatitellyouto
  $ cd fixonlywhatitellyouto

  $ printf "fix me!\n" > fixme.whole
  $ printf "not me.\n" > notme.whole
  $ hg add
  adding fixme.whole
  adding notme.whole
  $ hg fix --working-dir fixme.whole
  $ cat *.whole
  FIX ME!
  not me.

  $ cd ..

If we try to fix a missing file, we still fix other files.

  $ hg init fixmissingfile
  $ cd fixmissingfile

  $ printf "fix me!\n" > foo.whole
  $ hg add
  adding foo.whole
  $ hg fix --working-dir foo.whole bar.whole
  bar.whole: $ENOENT$
  $ cat *.whole
  FIX ME!

  $ cd ..

Specifying a directory name should fix all its files and subdirectories.

  $ hg init fixdirectory
  $ cd fixdirectory

  $ mkdir -p dir1/dir2
  $ printf "foo\n" > foo.whole
  $ printf "bar\n" > dir1/bar.whole
  $ printf "baz\n" > dir1/dir2/baz.whole
  $ hg add
  adding dir1/bar.whole
  adding dir1/dir2/baz.whole
  adding foo.whole
  $ hg fix --working-dir dir1
  $ cat foo.whole dir1/bar.whole dir1/dir2/baz.whole
  foo
  BAR
  BAZ

  $ cd ..

Fixing a file in the working directory that needs no fixes should not actually
write back to the file, so for example the mtime shouldn't change.

  $ hg init donttouchunfixedfiles
  $ cd donttouchunfixedfiles

  $ printf "NO FIX NEEDED\n" > foo.whole
  $ hg add
  adding foo.whole
  $ cp -p foo.whole foo.whole.orig
  $ cp -p foo.whole.orig foo.whole
  $ sleep 2 # mtime has a resolution of one or two seconds.
  $ hg fix --working-dir
  $ f foo.whole.orig --newer foo.whole
  foo.whole.orig: newer than foo.whole

  $ cd ..

When a fixer prints to stderr, we don't assume that it has failed. We show the
error messages to the user, and we still let the fixer affect the file it was
fixing if its exit code is zero. Some code formatters might emit error messages
on stderr and nothing on stdout, which would cause us to clear the file,
except that they also exit with a non-zero code. We show the user which fixer
emitted the stderr, and which revision, but we assume that the fixer will print
the filename if it is relevant (since the issue may be non-specific). There is
also a config to abort (without affecting any files whatsoever) if we see any
tool with a non-zero exit status.

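A minimal sketch of that policy, assuming a plain subprocess call;
show_warning stands in for the extension's real output path and is
hypothetical:

    import subprocess

    def run_fixer(command, old_content, show_warning):
        """Apply `command`; keep its stdout only on exit code 0."""
        proc = subprocess.run(
            command, input=old_content, capture_output=True, shell=True
        )
        for line in proc.stderr.splitlines():
            show_warning(line)  # stderr alone is not treated as failure
        if proc.returncode != 0:
            return old_content  # non-zero exit: leave the file untouched
        return proc.stdout
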
625 $ hg init showstderr
625 $ hg init showstderr
626 $ cd showstderr
626 $ cd showstderr
627
627
628 $ printf "hello\n" > hello.txt
628 $ printf "hello\n" > hello.txt
629 $ hg add
629 $ hg add
630 adding hello.txt
630 adding hello.txt
631 $ cat > $TESTTMP/work.sh <<'EOF'
631 $ cat > $TESTTMP/work.sh <<'EOF'
632 > printf 'HELLO\n'
632 > printf 'HELLO\n'
633 > printf "$@: some\nerror that didn't stop the tool" >&2
633 > printf "$@: some\nerror that didn't stop the tool" >&2
634 > exit 0 # success despite the stderr output
634 > exit 0 # success despite the stderr output
635 > EOF
635 > EOF
636 $ hg --config "fix.work:command=sh $TESTTMP/work.sh {rootpath}" \
636 $ hg --config "fix.work:command=sh $TESTTMP/work.sh {rootpath}" \
637 > --config "fix.work:pattern=hello.txt" \
637 > --config "fix.work:pattern=hello.txt" \
638 > fix --working-dir
638 > fix --working-dir
639 [wdir] work: hello.txt: some
639 [wdir] work: hello.txt: some
640 [wdir] work: error that didn't stop the tool
640 [wdir] work: error that didn't stop the tool
641 $ cat hello.txt
641 $ cat hello.txt
642 HELLO
642 HELLO
643
643
644 $ printf "goodbye\n" > hello.txt
644 $ printf "goodbye\n" > hello.txt
645 $ printf "foo\n" > foo.whole
645 $ printf "foo\n" > foo.whole
646 $ hg add
646 $ hg add
647 adding foo.whole
647 adding foo.whole
648 $ cat > $TESTTMP/fail.sh <<'EOF'
648 $ cat > $TESTTMP/fail.sh <<'EOF'
649 > printf 'GOODBYE\n'
649 > printf 'GOODBYE\n'
650 > printf "$@: some\nerror that did stop the tool\n" >&2
650 > printf "$@: some\nerror that did stop the tool\n" >&2
651 > exit 42 # success despite the stdout output
651 > exit 42 # success despite the stdout output
652 > EOF
652 > EOF
653 $ hg --config "fix.fail:command=sh $TESTTMP/fail.sh {rootpath}" \
653 $ hg --config "fix.fail:command=sh $TESTTMP/fail.sh {rootpath}" \
654 > --config "fix.fail:pattern=hello.txt" \
654 > --config "fix.fail:pattern=hello.txt" \
655 > --config "fix.failure=abort" \
655 > --config "fix.failure=abort" \
656 > fix --working-dir
656 > fix --working-dir
657 [wdir] fail: hello.txt: some
657 [wdir] fail: hello.txt: some
658 [wdir] fail: error that did stop the tool
658 [wdir] fail: error that did stop the tool
659 abort: no fixes will be applied
659 abort: no fixes will be applied
660 (use --config fix.failure=continue to apply any successful fixes anyway)
660 (use --config fix.failure=continue to apply any successful fixes anyway)
661 [255]
661 [255]
662 $ cat hello.txt
662 $ cat hello.txt
663 goodbye
663 goodbye
664 $ cat foo.whole
664 $ cat foo.whole
665 foo
665 foo
666
666
667 $ hg --config "fix.fail:command=sh $TESTTMP/fail.sh {rootpath}" \
667 $ hg --config "fix.fail:command=sh $TESTTMP/fail.sh {rootpath}" \
668 > --config "fix.fail:pattern=hello.txt" \
668 > --config "fix.fail:pattern=hello.txt" \
669 > fix --working-dir
669 > fix --working-dir
670 [wdir] fail: hello.txt: some
670 [wdir] fail: hello.txt: some
671 [wdir] fail: error that did stop the tool
671 [wdir] fail: error that did stop the tool
672 $ cat hello.txt
672 $ cat hello.txt
673 goodbye
673 goodbye
674 $ cat foo.whole
674 $ cat foo.whole
675 FOO
675 FOO
676
676
677 $ hg --config "fix.fail:command=exit 42" \
677 $ hg --config "fix.fail:command=exit 42" \
678 > --config "fix.fail:pattern=hello.txt" \
678 > --config "fix.fail:pattern=hello.txt" \
679 > fix --working-dir
679 > fix --working-dir
680 [wdir] fail: exited with status 42
680 [wdir] fail: exited with status 42
681
681
682 $ cd ..
682 $ cd ..
683
683
684 Fixing the working directory and its parent revision at the same time should
684 Fixing the working directory and its parent revision at the same time should
685 check out the replacement revision for the parent. This prevents any new
685 check out the replacement revision for the parent. This prevents any new
686 uncommitted changes from appearing. We test this for a clean working directory
686 uncommitted changes from appearing. We test this for a clean working directory
687 and a dirty one. In both cases, all lines/files changed since the grandparent
687 and a dirty one. In both cases, all lines/files changed since the grandparent
688 will be fixed. The grandparent is the "baserev" for both the parent and the
688 will be fixed. The grandparent is the "baserev" for both the parent and the
689 working copy.
689 working copy.
690
690
691 $ hg init fixdotandcleanwdir
691 $ hg init fixdotandcleanwdir
692 $ cd fixdotandcleanwdir
692 $ cd fixdotandcleanwdir
693
693
694 $ printf "hello\n" > hello.whole
694 $ printf "hello\n" > hello.whole
695 $ printf "world\n" > world.whole
695 $ printf "world\n" > world.whole
696 $ hg commit -Aqm "the parent commit"
696 $ hg commit -Aqm "the parent commit"
697
697
698 $ hg parents --template '{rev} {desc}\n'
698 $ hg parents --template '{rev} {desc}\n'
699 0 the parent commit
699 0 the parent commit
700 $ hg fix --working-dir -r .
700 $ hg fix --working-dir -r .
701 $ hg parents --template '{rev} {desc}\n'
701 $ hg parents --template '{rev} {desc}\n'
702 1 the parent commit
702 1 the parent commit
703 $ hg cat -r . *.whole
703 $ hg cat -r . *.whole
704 HELLO
704 HELLO
705 WORLD
705 WORLD
706 $ cat *.whole
706 $ cat *.whole
707 HELLO
707 HELLO
708 WORLD
708 WORLD
709 $ hg status
709 $ hg status
710
710
711 $ cd ..
711 $ cd ..
712
712
713 Same test with a dirty working copy.
713 Same test with a dirty working copy.
714
714
715 $ hg init fixdotanddirtywdir
715 $ hg init fixdotanddirtywdir
716 $ cd fixdotanddirtywdir
716 $ cd fixdotanddirtywdir
717
717
718 $ printf "hello\n" > hello.whole
718 $ printf "hello\n" > hello.whole
719 $ printf "world\n" > world.whole
719 $ printf "world\n" > world.whole
720 $ hg commit -Aqm "the parent commit"
720 $ hg commit -Aqm "the parent commit"
721
721
722 $ printf "hello,\n" > hello.whole
722 $ printf "hello,\n" > hello.whole
723 $ printf "world!\n" > world.whole
723 $ printf "world!\n" > world.whole
724
724
725 $ hg parents --template '{rev} {desc}\n'
725 $ hg parents --template '{rev} {desc}\n'
726 0 the parent commit
726 0 the parent commit
727 $ hg fix --working-dir -r .
727 $ hg fix --working-dir -r .
728 $ hg parents --template '{rev} {desc}\n'
728 $ hg parents --template '{rev} {desc}\n'
729 1 the parent commit
729 1 the parent commit
730 $ hg cat -r . *.whole
730 $ hg cat -r . *.whole
731 HELLO
731 HELLO
732 WORLD
732 WORLD
733 $ cat *.whole
733 $ cat *.whole
734 HELLO,
734 HELLO,
735 WORLD!
735 WORLD!
736 $ hg status
736 $ hg status
737 M hello.whole
737 M hello.whole
738 M world.whole
738 M world.whole
739
739
740 $ cd ..
740 $ cd ..
741
741
742 When we have a chain of commits that change mutually exclusive lines of code,
742 When we have a chain of commits that change mutually exclusive lines of code,
743 we should be able to do incremental fixing that causes each commit in the chain
743 we should be able to do incremental fixing that causes each commit in the chain
744 to include fixes made to the previous commits. This prevents children from
744 to include fixes made to the previous commits. This prevents children from
745 backing out the fixes made in their parents. A dirty working directory is
745 backing out the fixes made in their parents. A dirty working directory is
746 conceptually similar to another commit in the chain.
746 conceptually similar to another commit in the chain.
747
747
  $ hg init incrementallyfixchain
  $ cd incrementallyfixchain

  $ cat > file.changed <<EOF
  > first
  > second
  > third
  > fourth
  > fifth
  > EOF
  $ hg commit -Aqm "the common ancestor (the baserev)"
  $ cat > file.changed <<EOF
  > first (changed)
  > second
  > third
  > fourth
  > fifth
  > EOF
  $ hg commit -Aqm "the first commit to fix"
  $ cat > file.changed <<EOF
  > first (changed)
  > second
  > third (changed)
  > fourth
  > fifth
  > EOF
  $ hg commit -Aqm "the second commit to fix"
  $ cat > file.changed <<EOF
  > first (changed)
  > second
  > third (changed)
  > fourth
  > fifth (changed)
  > EOF

  $ hg fix -r . -r '.^' --working-dir

  $ hg parents --template '{rev}\n'
  4
  $ hg cat -r '.^^' file.changed
  first
  second
  third
  fourth
  fifth
  $ hg cat -r '.^' file.changed
  FIRST (CHANGED)
  second
  third
  fourth
  fifth
  $ hg cat -r . file.changed
  FIRST (CHANGED)
  second
  THIRD (CHANGED)
  fourth
  fifth
  $ cat file.changed
  FIRST (CHANGED)
  second
  THIRD (CHANGED)
  fourth
  FIFTH (CHANGED)

  $ cd ..

If we incrementally fix a merge commit, we should fix any lines that changed
versus either parent. You could imagine only fixing the intersection or some
other subset, but this is necessary if either parent is being fixed. It
prevents us from forgetting fixes made in either parent.

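A minimal sketch of that rule, for illustration only (the difflib-based helper
is an assumption, not the actual implementation): take the union of the ranges
changed versus each parent, so neither side's fixes are forgotten.

  import difflib

  def ranges_vs(parent_lines, merged_lines):
      sm = difflib.SequenceMatcher(None, parent_lines, merged_lines)
      return {(j1 + 1, j2) for op, i1, i2, j1, j2 in sm.get_opcodes()
              if op in ('replace', 'insert')}

  p1 = ['aa\n', 'b\n', 'c\n']   # this side changed "a"
  p2 = ['a\n', 'b\n', 'cc\n']   # this side changed "c"
  merged = ['aa\n', 'b\n', 'cc\n']
  print(sorted(ranges_vs(p1, merged) | ranges_vs(p2, merged)))
  # [(1, 1), (3, 3)] -> both "aa" and "cc" are considered changed
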
  $ hg init incrementallyfixmergecommit
  $ cd incrementallyfixmergecommit

  $ printf "a\nb\nc\n" > file.changed
  $ hg commit -Aqm "ancestor"

  $ printf "aa\nb\nc\n" > file.changed
  $ hg commit -m "change a"

  $ hg checkout '.^'
  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
  $ printf "a\nb\ncc\n" > file.changed
  $ hg commit -m "change c"
  created new head

  $ hg merge
  merging file.changed
  0 files updated, 1 files merged, 0 files removed, 0 files unresolved
  (branch merge, don't forget to commit)
  $ hg commit -m "merge"
  $ hg cat -r . file.changed
  aa
  b
  cc

  $ hg fix -r . --working-dir
  $ hg cat -r . file.changed
  AA
  b
  CC

  $ cd ..

Abort fixing revisions if there is an unfinished operation. We don't want to
make things worse by editing files or stripping/obsoleting things. Also abort
fixing the working directory if there are unresolved merge conflicts.

  $ hg init abortunresolved
  $ cd abortunresolved

  $ echo "foo1" > foo.whole
  $ hg commit -Aqm "foo 1"

  $ hg update null
  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
  $ echo "foo2" > foo.whole
  $ hg commit -Aqm "foo 2"

  $ hg --config extensions.rebase= rebase -r 1 -d 0
  rebasing 1:c3b6dc0e177a "foo 2" (tip)
  merging foo.whole
  warning: conflicts while merging foo.whole! (edit, then use 'hg resolve --mark')
  unresolved conflicts (see hg resolve, then hg rebase --continue)
  [1]

  $ hg --config extensions.rebase= fix --working-dir
  abort: unresolved conflicts
  (use 'hg resolve')
  [255]

  $ hg --config extensions.rebase= fix -r .
  abort: rebase in progress
  (use 'hg rebase --continue' or 'hg rebase --abort')
  [255]

  $ cd ..

When fixing a file that was renamed, we should diff against the source of the
rename for incremental fixing and we should correctly reproduce the rename in
the replacement revision.

  $ hg init fixrenamecommit
  $ cd fixrenamecommit

  $ printf "a\nb\nc\n" > source.changed
  $ hg commit -Aqm "source revision"
  $ hg move source.changed dest.changed
  $ printf "a\nb\ncc\n" > dest.changed
  $ hg commit -m "dest revision"

  $ hg fix -r .
  $ hg log -r tip --copies --template "{file_copies}\n"
  dest.changed (source.changed)
  $ hg cat -r tip dest.changed
  a
  b
  CC

  $ cd ..

When fixing revisions that remove files we must ensure that the replacement
actually removes the file, whereas it could accidentally leave it unchanged or
write an empty string to it.

  $ hg init fixremovedfile
  $ cd fixremovedfile

  $ printf "foo\n" > foo.whole
  $ printf "bar\n" > bar.whole
  $ hg commit -Aqm "add files"
  $ hg remove bar.whole
  $ hg commit -m "remove file"
  $ hg status --change .
  R bar.whole
  $ hg fix -r . foo.whole
  $ hg status --change tip
  M foo.whole
  R bar.whole

  $ cd ..

If fixing a revision finds no fixes to make, no replacement revision should be
created.

  $ hg init nofixesneeded
  $ cd nofixesneeded

  $ printf "FOO\n" > foo.whole
  $ hg commit -Aqm "add file"
  $ hg log --template '{rev}\n'
  0
  $ hg fix -r .
  $ hg log --template '{rev}\n'
  0

  $ cd ..

If fixing a commit reverts all the changes in the commit, we replace it with a
commit that changes no files.

  $ hg init nochangesleft
  $ cd nochangesleft

  $ printf "FOO\n" > foo.whole
  $ hg commit -Aqm "add file"
  $ printf "foo\n" > foo.whole
  $ hg commit -m "edit file"
  $ hg status --change .
  M foo.whole
  $ hg fix -r .
  $ hg status --change tip

  $ cd ..

If we fix a parent and child revision together, the child revision must be
replaced if the parent is replaced, even if the diffs of the child needed no
fixes. However, we're free to not replace revisions that need no fixes and have
no ancestors that are replaced.

  $ hg init mustreplacechild
  $ cd mustreplacechild

  $ printf "FOO\n" > foo.whole
  $ hg commit -Aqm "add foo"
  $ printf "foo\n" > foo.whole
  $ hg commit -m "edit foo"
  $ printf "BAR\n" > bar.whole
  $ hg commit -Aqm "add bar"

  $ hg log --graph --template '{rev} {files}'
  @ 2 bar.whole
  |
  o 1 foo.whole
  |
  o 0 foo.whole

  $ hg fix -r 0:2
  $ hg log --graph --template '{rev} {files}'
  o 4 bar.whole
  |
  o 3
  |
  | @ 2 bar.whole
  | |
  | x 1 foo.whole
  |/
  o 0 foo.whole


  $ cd ..

It's also possible that the child needs absolutely no changes, but we still
need to replace it to update its parent. If we skipped replacing the child
because it had no file content changes, it would become an orphan for no good
reason.

  $ hg init mustreplacechildevenifnop
  $ cd mustreplacechildevenifnop

  $ printf "Foo\n" > foo.whole
  $ hg commit -Aqm "add a bad foo"
  $ printf "FOO\n" > foo.whole
  $ hg commit -m "add a good foo"
  $ hg fix -r . -r '.^'
  $ hg log --graph --template '{rev} {desc}'
  o 3 add a good foo
  |
  o 2 add a bad foo

  @ 1 add a good foo
  |
  x 0 add a bad foo


  $ cd ..

Similar to the case above, the child revision may become empty as a result of
fixing its parent. We should still create an empty replacement child.
TODO: determine how this should interact with ui.allowemptycommit given that
the empty replacement could have children.

  $ hg init mustreplacechildevenifempty
  $ cd mustreplacechildevenifempty

  $ printf "foo\n" > foo.whole
  $ hg commit -Aqm "add foo"
  $ printf "Foo\n" > foo.whole
  $ hg commit -m "edit foo"
  $ hg fix -r . -r '.^'
  $ hg log --graph --template '{rev} {desc}\n' --stat
  o 3 edit foo
  |
  o 2 add foo
    foo.whole | 1 +
    1 files changed, 1 insertions(+), 0 deletions(-)

  @ 1 edit foo
  | foo.whole | 2 +-
  | 1 files changed, 1 insertions(+), 1 deletions(-)
  |
  x 0 add foo
    foo.whole | 1 +
    1 files changed, 1 insertions(+), 0 deletions(-)


  $ cd ..

Fixing a secret commit should replace it with another secret commit.

  $ hg init fixsecretcommit
  $ cd fixsecretcommit

  $ printf "foo\n" > foo.whole
  $ hg commit -Aqm "add foo" --secret
  $ hg fix -r .
  $ hg log --template '{rev} {phase}\n'
  1 secret
  0 secret

  $ cd ..

We should also preserve phase when fixing a draft commit while the user has
their default set to secret.

  $ hg init respectphasesnewcommit
  $ cd respectphasesnewcommit

  $ printf "foo\n" > foo.whole
  $ hg commit -Aqm "add foo"
  $ hg --config phases.newcommit=secret fix -r .
  $ hg log --template '{rev} {phase}\n'
  1 draft
  0 draft

  $ cd ..

Debug output should show what fixer commands are being subprocessed, which is
useful for anyone trying to set up a new config.

  $ hg init debugoutput
  $ cd debugoutput

  $ printf "foo\nbar\nbaz\n" > foo.changed
  $ hg commit -Aqm "foo"
  $ printf "Foo\nbar\nBaz\n" > foo.changed
  $ hg --debug fix --working-dir
  subprocess: * $TESTTMP/uppercase.py 1-1 3-3 (glob)

  $ cd ..

Fixing an obsolete revision can cause divergence, so we abort unless the user
configures to allow it. This is not yet smart enough to know whether there is a
successor, but even then it is not likely intentional or idiomatic to fix an
obsolete revision.

  $ hg init abortobsoleterev
  $ cd abortobsoleterev

  $ printf "foo\n" > foo.changed
  $ hg commit -Aqm "foo"
  $ hg debugobsolete `hg parents --template '{node}'`
  1 new obsolescence markers
  obsoleted 1 changesets
  $ hg --hidden fix -r 0
  abort: fixing obsolete revision could cause divergence
  [255]

  $ hg --hidden fix -r 0 --config experimental.evolution.allowdivergence=true
  $ hg cat -r tip foo.changed
  FOO

  $ cd ..

Test all of the available substitution values for fixer commands.

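For reference, here is a rough sketch, for illustration only, of how such a
command template might be expanded. The real expansion happens inside hg fix;
the variable names below are just the template keys exercised by this test.

  import os.path

  path = 'foo/bar'      # repo-relative path of the file being fixed
  first, last = 1, 2    # one changed line range
  command = "printf '%s\\n' '{rootpath}' '{basename}'"
  linerange = "'{first}' '{last}'"
  expanded = ' '.join([
      command.format(rootpath=path, basename=os.path.basename(path)),
      linerange.format(first=first, last=last),
  ])
  print(expanded)  # printf '%s\n' 'foo/bar' 'bar' '1' '2'
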
  $ hg init substitution
  $ cd substitution

  $ mkdir foo
  $ printf "hello\ngoodbye\n" > foo/bar
  $ hg add
  adding foo/bar
  $ hg --config "fix.fail:command=printf '%s\n' '{rootpath}' '{basename}'" \
  > --config "fix.fail:linerange='{first}' '{last}'" \
  > --config "fix.fail:pattern=foo/bar" \
  > fix --working-dir
  $ cat foo/bar
  foo/bar
  bar
  1
  2

  $ cd ..

The --base flag should allow picking the revisions to diff against for changed
files and incremental line formatting.

  $ hg init baseflag
  $ cd baseflag

  $ printf "one\ntwo\n" > foo.changed
  $ printf "bar\n" > bar.changed
  $ hg commit -Aqm "first"
  $ printf "one\nTwo\n" > foo.changed
  $ hg commit -m "second"
  $ hg fix -w --base .
  $ hg status
  $ hg fix -w --base null
  $ cat foo.changed
  ONE
  TWO
  $ cat bar.changed
  BAR

  $ cd ..

If the user asks to fix the parent of another commit, they are asking to create
an orphan. We must respect experimental.evolution.allowunstable.

  $ hg init allowunstable
  $ cd allowunstable

  $ printf "one\n" > foo.whole
  $ hg commit -Aqm "first"
  $ printf "two\n" > foo.whole
  $ hg commit -m "second"
  $ hg --config experimental.evolution.allowunstable=False fix -r '.^'
  abort: cannot fix changeset with children
  [255]
  $ hg fix -r '.^'
  1 new orphan changesets
  $ hg cat -r 2 foo.whole
  ONE

  $ cd ..

The --base flag affects the set of files being fixed. So while the --whole flag
makes the base irrelevant for changed line ranges, it still changes the
meaning and effect of the command. In this example, no files or lines are fixed
until we specify the base, but then we do fix unchanged lines.

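In other words, the base still decides WHICH files are candidates (those whose
content differs from the base), even when --whole widens the ranges to the
entire file. A toy sketch of the selection step, for illustration only:

  def files_to_fix(base_manifest, wdir_manifest):
      # a file is a candidate if it differs from (or is absent in) the base
      return [f for f, content in wdir_manifest.items()
              if base_manifest.get(f) != content]

  base = {'foo.changed': 'foo1\n'}
  wdir = {'foo.changed': 'foo1\nfoo2\n', 'bar.changed': 'bar\n'}
  print(sorted(files_to_fix(base, wdir)))
  # ['bar.changed', 'foo.changed'] -> both differ from the base
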
  $ hg init basewhole
  $ cd basewhole
  $ printf "foo1\n" > foo.changed
  $ hg commit -Aqm "first"
  $ printf "foo2\n" >> foo.changed
  $ printf "bar\n" > bar.changed
  $ hg commit -Aqm "second"

  $ hg fix --working-dir --whole
  $ cat *.changed
  bar
  foo1
  foo2

  $ hg fix --working-dir --base 0 --whole
  $ cat *.changed
  BAR
  FOO1
  FOO2

  $ cd ..

The execution order of tools can be controlled. This example doesn't work if
you sort after truncating, but the config defines the correct order while the
definitions are out of order (which might imply the incorrect order given the
implementation of fix). The goal is to use multiple tools to select the lowest
5 numbers in the file.

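Conceptually, the tools are sorted by descending priority before running; the
snippet below is an illustration only, and the tie-breaking rule in it is an
assumption, not documented behavior.

  tools = {'head': 1, 'sort': 2}
  for name in sorted(tools, key=lambda n: (-tools[n], n)):
      print(name)
  # sort  (priority 2 runs first)
  # head  (priority 1 runs second)
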
  $ hg init priorityexample
  $ cd priorityexample

  $ cat >> .hg/hgrc <<EOF
  > [fix]
  > head:command = head -n 5
  > head:pattern = numbers.txt
  > head:priority = 1
  > sort:command = sort -n
  > sort:pattern = numbers.txt
  > sort:priority = 2
  > EOF

  $ printf "8\n2\n3\n6\n7\n4\n9\n5\n1\n0\n" > numbers.txt
  $ hg add -q
  $ hg fix -w
  $ cat numbers.txt
  0
  1
  2
  3
  4

And of course we should be able to break this by reversing the execution order.
Test negative priorities while we're at it.

  $ cat >> .hg/hgrc <<EOF
  > [fix]
  > head:priority = -1
  > sort:priority = -2
  > EOF
  $ printf "8\n2\n3\n6\n7\n4\n9\n5\n1\n0\n" > numbers.txt
  $ hg fix -w
  $ cat numbers.txt
  2
  3
  6
  7
  8

  $ cd ..

It's possible for repeated applications of a fixer tool to create cycles in the
generated content of a file. For example, two users with different versions of
a code formatter might fight over the formatting when they run hg fix. In the
absence of other changes, this means we could produce commits with the same
hash in subsequent runs of hg fix. This is a problem unless we support
obsolescence cycles well. We avoid this by adding an extra field to the
successor which forces it to have a new hash. That's why this test creates
three revisions instead of two.

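A conceptual sketch of why the extra field helps, for illustration only: the
"fix_source" key below is made up, and real changeset hashing covers much more
than file content.

  import hashlib

  def node(content, extra=()):
      h = hashlib.sha1(content)
      for key, value in extra:
          h.update(b'%s=%s' % (key, value))
      return h.hexdigest()

  # identical content alone would reproduce an existing hash ...
  print(node(b'ab') == node(b'ab'))                              # True
  # ... but salting the metadata guarantees a distinct successor
  print(node(b'ab') == node(b'ab', [(b'fix_source', b'1234')]))  # False
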
  $ hg init cyclictool
  $ cd cyclictool

  $ cat >> .hg/hgrc <<EOF
  > [fix]
  > swapletters:command = tr ab ba
  > swapletters:pattern = foo
  > EOF

  $ echo ab > foo
  $ hg commit -Aqm foo

  $ hg fix -r 0
  $ hg fix -r 1

  $ hg cat -r 0 foo --hidden
  ab
  $ hg cat -r 1 foo --hidden
  ba
  $ hg cat -r 2 foo
  ab

  $ cd ..

We run fixer tools in the repo root so they can look for config files or other
important things in the working directory. This does NOT mean we are
reconstructing a working copy of every revision being fixed; we're just giving
the tool knowledge of the repo's location in case it can do something
reasonable with that.

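Roughly, the invocation looks like the following sketch, for illustration
only; fixer tools receive the file content on stdin and reply on stdout.

  import subprocess

  def run_fixer(command, content, repo_root):
      # run the configured shell command from the repo root
      proc = subprocess.run(command, shell=True, input=content,
                            capture_output=True, cwd=repo_root)
      return proc.stdout

  print(run_fixer('tr a-z A-Z', b'hello\n', '/tmp'))  # b'HELLO\n'
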
  $ hg init subprocesscwd
  $ cd subprocesscwd

  $ cat >> .hg/hgrc <<EOF
  > [fix]
  > printcwd:command = "$PYTHON" -c "import os; print(os.getcwd())"
  > printcwd:pattern = relpath:foo/bar
  > filesetpwd:command = "$PYTHON" -c "import os; print('fs: ' + os.getcwd())"
  > filesetpwd:pattern = set:**quux
  > EOF

  $ mkdir foo
  $ printf "bar\n" > foo/bar
  $ printf "quux\n" > quux
  $ hg commit -Aqm blah

  $ hg fix -w -r . foo/bar
  $ hg cat -r tip foo/bar
  $TESTTMP/subprocesscwd
  $ cat foo/bar
  $TESTTMP/subprocesscwd

  $ cd foo

  $ hg fix -w -r . bar
  $ hg cat -r tip bar ../quux
  $TESTTMP/subprocesscwd
  quux
  $ cat bar ../quux
  $TESTTMP/subprocesscwd
  quux
  $ echo modified > bar
  $ hg fix -w bar
  $ cat bar
  $TESTTMP/subprocesscwd

Apparently fixing p1() and its descendants doesn't include wdir() unless
explicitly stated.

  $ hg fix -r '.::'
  $ hg cat -r . ../quux
  quux
  $ hg cat -r tip ../quux
  fs: $TESTTMP/subprocesscwd
  $ cat ../quux
  quux

Clean files are not fixed unless explicitly named.

  $ echo 'dirty' > ../quux

  $ hg fix --working-dir
  $ cat ../quux
  fs: $TESTTMP/subprocesscwd

  $ cd ../..

Tools configured without a pattern are ignored. It would be too dangerous to
run them on all files, because this might happen while testing a configuration
that also deletes all of the file content. There is no reasonable subset of the
files to use as a default. Users should be explicit about what files are
affected by a tool. This test also confirms that we don't crash when the
pattern config is missing, and that we only warn about it once.

  $ hg init nopatternconfigured
  $ cd nopatternconfigured

  $ printf "foo" > foo
  $ printf "bar" > bar
  $ hg add -q
  $ hg fix --debug --working-dir --config "fix.nopattern:command=echo fixed"
  fixer tool has no pattern configuration: nopattern
  $ cat foo bar
  foobar (no-eol)
  $ hg fix --debug --working-dir --config "fix.nocommand:pattern=foo.bar"
  fixer tool has no command configuration: nocommand

  $ cd ..

Tools can be disabled. Disabled tools do nothing but print a debug message.

  $ hg init disabled
  $ cd disabled

  $ printf "foo\n" > foo
  $ hg add -q
  $ hg fix --debug --working-dir --config "fix.disabled:command=echo fixed" \
  > --config "fix.disabled:pattern=foo" \
  > --config "fix.disabled:enabled=false"
  ignoring disabled fixer tool: disabled
  $ cat foo
  foo

  $ cd ..

Test that we can configure a fixer to affect all files regardless of the cwd.
The way we invoke matching must not prohibit this.

  $ hg init affectallfiles
  $ cd affectallfiles

  $ mkdir foo bar
  $ printf "foo" > foo/file
  $ printf "bar" > bar/file
  $ printf "baz" > baz_file
  $ hg add -q

  $ cd bar
  $ hg fix --working-dir --config "fix.cooltool:command=echo fixed" \
  > --config "fix.cooltool:pattern=glob:**"
  $ cd ..

  $ cat foo/file
  fixed
  $ cat bar/file
  fixed
  $ cat baz_file
  fixed

  $ cd ..

Tools should be able to run on unchanged files, even if they set :linerange.
This includes a corner case where deleted chunks of a file are not considered
changes.

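The corner case, sketched with difflib for illustration only: a pure deletion
changes the file, but the resulting range on the new side is empty, so a tool
fed only changed line ranges would be given nothing to do.

  import difflib

  base = ['a\n', 'b\n', 'c\n']
  new = ['a\n', 'c\n']  # "b" was deleted
  sm = difflib.SequenceMatcher(None, base, new)
  print([(op, j1 + 1, j2) for op, i1, i2, j1, j2 in sm.get_opcodes()
         if op != 'equal'])
  # [('delete', 2, 1)] -> first > last, i.e. an empty new-side range
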
  $ hg init skipclean
  $ cd skipclean

  $ printf "a\nb\nc\n" > foo
  $ printf "a\nb\nc\n" > bar
  $ printf "a\nb\nc\n" > baz
  $ hg commit -Aqm "base"

  $ printf "a\nc\n" > foo
  $ printf "a\nx\nc\n" > baz

  $ cat >> print.py <<EOF
  > import sys
  > for a in sys.argv[1:]:
  >     print(a)
  > EOF

  $ hg fix --working-dir foo bar baz \
  > --config "fix.changedlines:command=\"$PYTHON\" print.py \"Line ranges:\"" \
  > --config 'fix.changedlines:linerange="{first} through {last}"' \
  > --config 'fix.changedlines:pattern=glob:**' \
  > --config 'fix.changedlines:skipclean=false'

  $ cat foo
  Line ranges:
  $ cat bar
  Line ranges:
  $ cat baz
  Line ranges:
  2 through 2

  $ cd ..

Test various cases around merges. We were previously dropping files if they were
created on only the p2 side of the merge, so let's test permutations of:
* added, was fixed
* added, considered for fixing but was already good
* added, not considered for fixing
* modified, was fixed
* modified, considered for fixing but was already good
* modified, not considered for fixing

Before the bug was fixed where we would drop files, this test demonstrated the
following issues:
* new_in_r1.ignored, new_in_r1_already_good.changed, and
  mod_in_r1_already_good.changed were NOT in the manifest for the merge commit
* mod_in_r1.ignored had its contents from r0, NOT r1.

We're also setting a named branch for every commit to demonstrate that the
branch is kept intact and there aren't issues updating to another branch in the
middle of fix.

  $ hg init merge_keeps_files
  $ cd merge_keeps_files
  $ for f in r0 mod_in_r1 mod_in_r2 mod_in_merge mod_in_child; do
  >   for c in changed whole ignored; do
  >     printf "hello\n" > $f.$c
  >   done
  >   printf "HELLO\n" > "mod_in_${f}_already_good.changed"
  > done
  $ hg branch -q r0
  $ hg ci -Aqm 'r0'
  $ hg phase -p
  $ make_test_files() {
  >   printf "world\n" >> "mod_in_$1.changed"
  >   printf "world\n" >> "mod_in_$1.whole"
  >   printf "world\n" >> "mod_in_$1.ignored"
  >   printf "WORLD\n" >> "mod_in_$1_already_good.changed"
  >   printf "new in $1\n" > "new_in_$1.changed"
  >   printf "new in $1\n" > "new_in_$1.whole"
  >   printf "new in $1\n" > "new_in_$1.ignored"
  >   printf "ALREADY GOOD, NEW IN THIS REV\n" > "new_in_$1_already_good.changed"
  > }
  $ make_test_commit() {
  >   make_test_files "$1"
  >   hg branch -q "$1"
  >   hg ci -Aqm "$2"
  > }
  $ make_test_commit r1 "merge me, pt1"
  $ hg co -q ".^"
  $ make_test_commit r2 "merge me, pt2"
  $ hg merge -qr 1
  $ make_test_commit merge "evil merge"
  $ make_test_commit child "child of merge"
  $ make_test_files wdir
  $ hg fix -r 'not public()' -w
  $ hg log -G -T'{rev}:{shortest(node,8)}: branch:{branch} desc:{desc}'
  @ 8:c22ce900: branch:child desc:child of merge
  |
  o 7:5a30615a: branch:merge desc:evil merge
  |\
  | o 6:4e5acdc4: branch:r2 desc:merge me, pt2
  | |
  o | 5:eea01878: branch:r1 desc:merge me, pt1
  |/
  o 0:0c548d87: branch:r0 desc:r0

  $ hg files -r tip
  mod_in_child.changed
  mod_in_child.ignored
  mod_in_child.whole
  mod_in_child_already_good.changed
  mod_in_merge.changed
  mod_in_merge.ignored
  mod_in_merge.whole
  mod_in_merge_already_good.changed
  mod_in_mod_in_child_already_good.changed
  mod_in_mod_in_merge_already_good.changed
  mod_in_mod_in_r1_already_good.changed
  mod_in_mod_in_r2_already_good.changed
  mod_in_r0_already_good.changed
  mod_in_r1.changed
  mod_in_r1.ignored
  mod_in_r1.whole
  mod_in_r1_already_good.changed
  mod_in_r2.changed
  mod_in_r2.ignored
  mod_in_r2.whole
  mod_in_r2_already_good.changed
  new_in_child.changed
  new_in_child.ignored
  new_in_child.whole
  new_in_child_already_good.changed
  new_in_merge.changed
  new_in_merge.ignored
  new_in_merge.whole
  new_in_merge_already_good.changed
  new_in_r1.changed
  new_in_r1.ignored
  new_in_r1.whole
  new_in_r1_already_good.changed
  new_in_r2.changed
  new_in_r2.ignored
  new_in_r2.whole
  new_in_r2_already_good.changed
  r0.changed
  r0.ignored
  r0.whole
  $ for f in "$(hg files -r tip)"; do hg cat -r tip $f -T'{path}:\n{data}\n'; done
  mod_in_child.changed:
  hello
  WORLD

  mod_in_child.ignored:
  hello
  world

  mod_in_child.whole:
  HELLO
  WORLD

  mod_in_child_already_good.changed:
  WORLD

  mod_in_merge.changed:
  hello
  WORLD

  mod_in_merge.ignored:
  hello
  world

  mod_in_merge.whole:
  HELLO
  WORLD

  mod_in_merge_already_good.changed:
  WORLD

  mod_in_mod_in_child_already_good.changed:
  HELLO

  mod_in_mod_in_merge_already_good.changed:
  HELLO

  mod_in_mod_in_r1_already_good.changed:
  HELLO

  mod_in_mod_in_r2_already_good.changed:
  HELLO

  mod_in_r0_already_good.changed:
  HELLO

  mod_in_r1.changed:
  hello
  WORLD

  mod_in_r1.ignored:
  hello
  world

  mod_in_r1.whole:
  HELLO
  WORLD

  mod_in_r1_already_good.changed:
  WORLD

  mod_in_r2.changed:
  hello
  WORLD

  mod_in_r2.ignored:
  hello
  world

  mod_in_r2.whole:
  HELLO
  WORLD

  mod_in_r2_already_good.changed:
  WORLD

  new_in_child.changed:
  NEW IN CHILD

  new_in_child.ignored:
  new in child

  new_in_child.whole:
  NEW IN CHILD

  new_in_child_already_good.changed:
  ALREADY GOOD, NEW IN THIS REV

  new_in_merge.changed:
  NEW IN MERGE

  new_in_merge.ignored:
  new in merge

  new_in_merge.whole:
  NEW IN MERGE

  new_in_merge_already_good.changed:
  ALREADY GOOD, NEW IN THIS REV

  new_in_r1.changed:
  NEW IN R1

  new_in_r1.ignored:
  new in r1

  new_in_r1.whole:
  NEW IN R1

  new_in_r1_already_good.changed:
  ALREADY GOOD, NEW IN THIS REV

  new_in_r2.changed:
  NEW IN R2

  new_in_r2.ignored:
  new in r2

  new_in_r2.whole:
  NEW IN R2

  new_in_r2_already_good.changed:
  ALREADY GOOD, NEW IN THIS REV

  r0.changed:
  hello

  r0.ignored:
  hello

  r0.whole:
  hello
