manifestcache: support multiple cache addition in one debug command run...
marmoute - r42124:1fe278aa default
@@ -1,3427 +1,3429 @@
# debugcommands.py - command processing for debug* commands
#
# Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import codecs
import collections
import difflib
import errno
import operator
import os
import random
import re
import socket
import ssl
import stat
import string
import subprocess
import sys
import time

from .i18n import _
from .node import (
    bin,
    hex,
    nullhex,
    nullid,
    nullrev,
    short,
)
from . import (
    bundle2,
    changegroup,
    cmdutil,
    color,
    context,
    copies,
    dagparser,
    encoding,
    error,
    exchange,
    extensions,
    filemerge,
    filesetlang,
    formatter,
    hg,
    httppeer,
    localrepo,
    lock as lockmod,
    logcmdutil,
    merge as mergemod,
    obsolete,
    obsutil,
    phases,
    policy,
    pvec,
    pycompat,
    registrar,
    repair,
    revlog,
    revset,
    revsetlang,
    scmutil,
    setdiscovery,
    simplemerge,
    sshpeer,
    sslutil,
    streamclone,
    templater,
    treediscovery,
    upgrade,
    url as urlmod,
    util,
    vfs as vfsmod,
    wireprotoframing,
    wireprotoserver,
    wireprotov2peer,
)
from .utils import (
    cborutil,
    dateutil,
    procutil,
    stringutil,
)

from .revlogutils import (
    deltas as deltautil
)

release = lockmod.release

command = registrar.command()

@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    if len(args) == 3:
        index, rev1, rev2 = args
        r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = r.lookup
    elif len(args) == 2:
        if not repo:
            raise error.Abort(_('there is no Mercurial repository here '
                                '(.hg not found)'))
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_('either two or three arguments required'))
    a = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write('%d:%s\n' % (r.rev(a), hex(a)))

@command('debugapplystreamclonebundle', [], 'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    f = hg.openpath(ui, fname)
    gen = exchange.readbundle(ui, f, fname)
    gen.apply(repo)

@command('debugbuilddag',
    [('m', 'mergeable-file', None, _('add single file mergeable changes')),
    ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
    ('n', 'new-file', None, _('add new file at each rev'))],
    _('[OPTION]... [TEXT]'))
def debugbuilddag(ui, repo, text=None,
                  mergeable_file=False,
                  overwritten_file=False,
                  new_file=False):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_("reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_('repository is not empty'))

    # determine number of revs in DAG
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == 'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = ['%d' % i
                              for i in pycompat.xrange(0, total * linesperrev)]
        initialmergedlines.append("")

    tags = []
    progress = ui.makeprogress(_('building'), unit=_('revisions'),
                               total=total)
    with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
        at = -1
        atbranch = 'default'
        nodeids = []
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == 'n':
                ui.note(('node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = "mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [x[fn].data() for x in (pa, p1,
                                                                     p2)]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append("")
                    elif at > 0:
                        ml = p1[fn].data().split("\n")
                    else:
                        ml = initialmergedlines
                    ml[id * linesperrev] += " r%i" % id
                    mergedtext = "\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = "of"
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id

                if new_file:
                    fn = "nf%i" % id
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith("nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(repo, cx, path,
                                                  filecontent[path])
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
                                    date=(id, 0),
                                    user="debugbuilddag",
                                    extra={'branch': atbranch})
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == 'l':
                id, name = data
                ui.note(('tag %s\n' % name))
                tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == 'a':
                ui.note(('branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write("localtags", "".join(tags))

def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    indent_string = ' ' * indent
    if all:
        ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
                 % indent_string)

        def showchunks(named):
            ui.write("\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write("%s%s %s %s %s %s %d\n" %
                         (indent_string, hex(node), hex(p1), hex(p2),
                          hex(cs), hex(deltabase), len(delta)))

        chunkdata = gen.changelogheader()
        showchunks("changelog")
        chunkdata = gen.manifestheader()
        showchunks("manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata['filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_('use debugbundle2 for this file'))
        chunkdata = gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write("%s%s\n" % (indent_string, hex(node)))

def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = ' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        msg = "%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = "%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        fm = ui.formatter('debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()

def _debugphaseheads(ui, data, indent=0):
    """display version and markers contained in 'data'"""
    indent_string = ' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(indent_string)
            ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))

def _quasirepr(thing):
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        return '{%s}' % (
            b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
    return pycompat.bytestr(repr(thing))

def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_('not a bundle2 file'))
    ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
    parttypes = opts.get(r'part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = '%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        if part.type == 'changegroup':
            version = part.params.get('version', '01')
            cg = changegroup.getunbundler(version, part, 'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == 'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == 'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)

@command('debugbundle',
    [('a', 'all', None, _('show all details')),
     ('', 'part-type', [], _('show only the named part type')),
     ('', 'spec', None, _('print the bundlespec of the bundle'))],
    _('FILE'),
    norepo=True)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            spec = exchange.getbundlespec(ui, f)
            ui.write('%s\n' % spec)
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)

@command('debugcapabilities',
    [], _('PATH'),
    norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    caps = peer.capabilities()
    ui.write(('Main capabilities:\n'))
    for c in sorted(caps):
        ui.write(('  %s\n') % c)
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.write(('Bundle2 capabilities:\n'))
        for key, values in sorted(b2caps.iteritems()):
            ui.write(('  %s\n') % key)
            for v in values:
                ui.write(('    %s\n') % v)

@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        error = _(".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(error)

@command('debugcolor',
    [('', 'style', None, _('show all configured styles'))],
    'hg debugcolor')
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
    if opts.get(r'style'):
        return _debugdisplaystyle(ui)
    else:
        return _debugdisplaycolor(ui)

def _debugdisplaycolor(ui):
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for k, v in ui.configitems('color'):
            if k.startswith('color.'):
                ui._styles[k] = k[6:]
            elif k.startswith('terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_('available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(),
                   key=lambda i: ('_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(('%s\n') % colorname, label=label)

def _debugdisplaystyle(ui):
    ui.write(_('available style:\n'))
    if not ui._styles:
        return
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write('%s' % label, label=label)
        if effects:
            # 50
            ui.write(': ')
            ui.write(' ' * (max(0, width - len(label))))
            ui.write(', '.join(ui.label(e, e) for e in effects.split()))
        ui.write('\n')

@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(_('(warning: stream clone bundle will contain secret '
                  'revisions)\n'))

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))

@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
    ('b', 'branches', None, _('annotate with branch names')),
    ('', 'dots', None, _('use dots for runs')),
    ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get(r'spaces')
    dots = opts.get(r'dots')
    if file_:
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False),
                             file_)
        revs = set((int(r) for r in revs))
        def events():
            for r in rlog:
                yield 'n', (r, list(p for p in rlog.parentrevs(r)
                                    if p != -1))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        cl = repo.changelog
        tags = opts.get(r'tags')
        branches = opts.get(r'branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            b = "default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(p for p in cl.parentrevs(r)
                                    if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
        ui.write("\n")

@command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    r = cmdutil.openstorage(repo, 'debugdata', file_, opts)
    try:
        ui.write(r.revision(r.lookup(rev), raw=True))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)

@command('debugdate',
    [('e', 'extended', None, _('try extended date formats'))],
    _('[-e] DATE [RANGE]'),
    norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts[r"extended"]:
        d = dateutil.parsedate(date, util.extendeddateformats)
    else:
        d = dateutil.parsedate(date)
    ui.write(("internal: %d %d\n") % d)
    ui.write(("standard: %s\n") % dateutil.datestr(d))
    if range:
        m = dateutil.matchdate(range)
        ui.write(("match: %s\n") % m(d[0]))

@command('debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``:       revision number
    :``chainid``:   delta chain identifier (numbered by unique base)
    :``chainlen``:  delta chain length to this revision
    :``prevrev``:   previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``:  compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                    (new delta chains typically start at ratio 2.00)
    :``lindist``:   linear distance from base revision in delta chain to end
                    of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                    how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``:     total size of data read from the disk for a revision
                       (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``:  density of useful bytes in the data read from the disk
    :``srchunks``:  in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            if e[3] == e[5]:
                deltatype = 'p1'
            elif e[3] == e[6]:
                deltatype = 'p2'
            elif e[3] == rev - 1:
                deltatype = 'prev'
            elif e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            if e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    fm.plain('    rev  chain# chainlen     prev   delta       '
             'size    rawsize  chainsize     ratio   lindist extradist '
             'extraratio')
    if withsparseread:
        fm.plain('   readsize largestblk rddensity srchunks')
    fm.plain('\n')

    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write('readsize largestblock readdensity srchunks',
                     ' %10d %10d %9.5f %8d',
                     readsize, largestblock, readdensity, srchunks,
                     readsize=readsize, largestblock=largestblock,
                     readdensity=readdensity, srchunks=srchunks)

        fm.plain('\n')

    fm.end()

@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime (DEPRECATED)')),
    ('', 'dates', True, _('display the saved mtime')),
    ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = not opts[r'dates']
    if opts.get(r'nodates') is not None:
        nodates = True
    datesort = opts.get(r'datesort')

    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        if ent[3] == -1:
            timestr = 'unset               '
        elif nodates:
            timestr = 'set                 '
        else:
            timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
                                    time.localtime(ent[3]))
            timestr = encoding.strtolocal(timestr)
        if ent[1] & 0o20000:
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))

@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
    ('', 'nonheads', None,
     _('use old-style discovery with non-heads included')),
    ('', 'rev', [], 'restrict discovery to this set of revs'),
    ] + cmdutil.remoteopts,
    _('[--rev REV] [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(pushedrevs, remoteheads, remote=remote):
        if opts.get('old'):
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))

            clnode = repo.changelog.node
            common = repo.revs('heads(::%ln)', common)
            common = {clnode(r) for r in common}
        else:
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
                                                            ancestorsof=nodes)
        common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
        if lheads <= common:
            ui.write(("local is subset\n"))
        elif rheads <= common:
            ui.write(("remote is subset\n"))

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts['rev']
    doit(localrevs, remoterevs)

_chunksize = 4 << 10

@command('debugdownload',
    [
        ('o', 'output', '', _('path')),
    ],
    optionalrepo=True)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config
    """
    fh = urlmod.open(ui, url, output)

    dest = ui
    if output:
        dest = open(output, "wb", _chunksize)
    try:
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        if output:
            dest.close()

@command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter('debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = pycompat.fsencode(extmod.__file__)
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', '').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write('name', '%s\n', extname)
        else:
            fm.write('name', '%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain('\n')
            elif not exttestedwith:
                fm.plain(_(' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(' (%s!)\n' % lasttestedversion)

        fm.condwrite(ui.verbose and extsource, 'source',
                     _('  location: %s\n'), extsource or "")

        if ui.verbose:
            fm.plain(_('  bundled: %s\n') % ['no', 'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
                     _('  tested with: %s\n'),
                     fm.formatlist(exttestedwith, name='ver'))

        fm.condwrite(ui.verbose and extbuglink, 'buglink',
                     _('  bug reporting: %s\n'), extbuglink or "")

    fm.end()

@command('debugfileset',
    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
     ('', 'all-files', False,
      _('test files from all revisions and working directory')),
     ('s', 'show-matcher', None,
      _('print internal representation of matcher')),
     ('p', 'show-stage', [],
      _('print parsed tree at the given stage'), _('NAME'))],
    _('[-r REV] [--all-files] [OPTION]... FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
904 from . import fileset
904 from . import fileset
905 fileset.symbols # force import of fileset so we have predicates to optimize
905 fileset.symbols # force import of fileset so we have predicates to optimize
906 opts = pycompat.byteskwargs(opts)
906 opts = pycompat.byteskwargs(opts)
907 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
907 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
908
908
909 stages = [
909 stages = [
910 ('parsed', pycompat.identity),
910 ('parsed', pycompat.identity),
911 ('analyzed', filesetlang.analyze),
911 ('analyzed', filesetlang.analyze),
912 ('optimized', filesetlang.optimize),
912 ('optimized', filesetlang.optimize),
913 ]
913 ]
914 stagenames = set(n for n, f in stages)
914 stagenames = set(n for n, f in stages)
915
915
916 showalways = set()
916 showalways = set()
917 if ui.verbose and not opts['show_stage']:
917 if ui.verbose and not opts['show_stage']:
918 # show parsed tree by --verbose (deprecated)
918 # show parsed tree by --verbose (deprecated)
919 showalways.add('parsed')
919 showalways.add('parsed')
920 if opts['show_stage'] == ['all']:
920 if opts['show_stage'] == ['all']:
921 showalways.update(stagenames)
921 showalways.update(stagenames)
922 else:
922 else:
923 for n in opts['show_stage']:
923 for n in opts['show_stage']:
924 if n not in stagenames:
924 if n not in stagenames:
925 raise error.Abort(_('invalid stage name: %s') % n)
925 raise error.Abort(_('invalid stage name: %s') % n)
926 showalways.update(opts['show_stage'])
926 showalways.update(opts['show_stage'])
927
927
928 tree = filesetlang.parse(expr)
928 tree = filesetlang.parse(expr)
929 for n, f in stages:
929 for n, f in stages:
930 tree = f(tree)
930 tree = f(tree)
931 if n in showalways:
931 if n in showalways:
932 if opts['show_stage'] or n != 'parsed':
932 if opts['show_stage'] or n != 'parsed':
933 ui.write(("* %s:\n") % n)
933 ui.write(("* %s:\n") % n)
934 ui.write(filesetlang.prettyformat(tree), "\n")
934 ui.write(filesetlang.prettyformat(tree), "\n")
935
935
936 files = set()
936 files = set()
937 if opts['all_files']:
937 if opts['all_files']:
938 for r in repo:
938 for r in repo:
939 c = repo[r]
939 c = repo[r]
940 files.update(c.files())
940 files.update(c.files())
941 files.update(c.substate)
941 files.update(c.substate)
942 if opts['all_files'] or ctx.rev() is None:
942 if opts['all_files'] or ctx.rev() is None:
943 wctx = repo[None]
943 wctx = repo[None]
944 files.update(repo.dirstate.walk(scmutil.matchall(repo),
944 files.update(repo.dirstate.walk(scmutil.matchall(repo),
945 subrepos=list(wctx.substate),
945 subrepos=list(wctx.substate),
946 unknown=True, ignored=True))
946 unknown=True, ignored=True))
947 files.update(wctx.substate)
947 files.update(wctx.substate)
948 else:
948 else:
949 files.update(ctx.files())
949 files.update(ctx.files())
950 files.update(ctx.substate)
950 files.update(ctx.substate)
951
951
952 m = ctx.matchfileset(expr)
952 m = ctx.matchfileset(expr)
953 if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
953 if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
954 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
954 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
955 for f in sorted(files):
955 for f in sorted(files):
956 if not m(f):
956 if not m(f):
957 continue
957 continue
958 ui.write("%s\n" % f)
958 ui.write("%s\n" % f)
959
959
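The --show-stage handling above is a small pipeline: the parsed tree is folded through a fixed list of (name, transform) stages and the intermediate result is printed for any stage the user asked to see. A generic standalone sketch of that pattern, using string transforms as illustrative stand-ins for the filesetlang functions:

def run_stages(value, stages, show=frozenset()):
    # fold the value through (name, transform) stages, printing the
    # intermediate result for any stage the caller asked to see
    for name, transform in stages:
        value = transform(value)
        if name in show:
            print('* %s:' % name)
            print(value)
    return value

stages = [('parsed', str.strip), ('analyzed', str.lower), ('optimized', str.split)]
print(run_stages('  Clean() OR size(">1k")  ', stages, show={'optimized'}))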
960 @command('debugformat',
960 @command('debugformat',
961 [] + cmdutil.formatteropts)
961 [] + cmdutil.formatteropts)
962 def debugformat(ui, repo, **opts):
962 def debugformat(ui, repo, **opts):
963 """display format information about the current repository
963 """display format information about the current repository
964
964
965 Use --verbose to get extra information about current config value and
965 Use --verbose to get extra information about current config value and
966 Mercurial default."""
966 Mercurial default."""
967 opts = pycompat.byteskwargs(opts)
967 opts = pycompat.byteskwargs(opts)
968 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
968 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
969 maxvariantlength = max(len('format-variant'), maxvariantlength)
969 maxvariantlength = max(len('format-variant'), maxvariantlength)
970
970
971 def makeformatname(name):
971 def makeformatname(name):
972 return '%s:' + (' ' * (maxvariantlength - len(name)))
972 return '%s:' + (' ' * (maxvariantlength - len(name)))
973
973
974 fm = ui.formatter('debugformat', opts)
974 fm = ui.formatter('debugformat', opts)
975 if fm.isplain():
975 if fm.isplain():
976 def formatvalue(value):
976 def formatvalue(value):
977 if util.safehasattr(value, 'startswith'):
977 if util.safehasattr(value, 'startswith'):
978 return value
978 return value
979 if value:
979 if value:
980 return 'yes'
980 return 'yes'
981 else:
981 else:
982 return 'no'
982 return 'no'
983 else:
983 else:
984 formatvalue = pycompat.identity
984 formatvalue = pycompat.identity
985
985
986 fm.plain('format-variant')
986 fm.plain('format-variant')
987 fm.plain(' ' * (maxvariantlength - len('format-variant')))
987 fm.plain(' ' * (maxvariantlength - len('format-variant')))
988 fm.plain(' repo')
988 fm.plain(' repo')
989 if ui.verbose:
989 if ui.verbose:
990 fm.plain(' config default')
990 fm.plain(' config default')
991 fm.plain('\n')
991 fm.plain('\n')
992 for fv in upgrade.allformatvariant:
992 for fv in upgrade.allformatvariant:
993 fm.startitem()
993 fm.startitem()
994 repovalue = fv.fromrepo(repo)
994 repovalue = fv.fromrepo(repo)
995 configvalue = fv.fromconfig(repo)
995 configvalue = fv.fromconfig(repo)
996
996
997 if repovalue != configvalue:
997 if repovalue != configvalue:
998 namelabel = 'formatvariant.name.mismatchconfig'
998 namelabel = 'formatvariant.name.mismatchconfig'
999 repolabel = 'formatvariant.repo.mismatchconfig'
999 repolabel = 'formatvariant.repo.mismatchconfig'
1000 elif repovalue != fv.default:
1000 elif repovalue != fv.default:
1001 namelabel = 'formatvariant.name.mismatchdefault'
1001 namelabel = 'formatvariant.name.mismatchdefault'
1002 repolabel = 'formatvariant.repo.mismatchdefault'
1002 repolabel = 'formatvariant.repo.mismatchdefault'
1003 else:
1003 else:
1004 namelabel = 'formatvariant.name.uptodate'
1004 namelabel = 'formatvariant.name.uptodate'
1005 repolabel = 'formatvariant.repo.uptodate'
1005 repolabel = 'formatvariant.repo.uptodate'
1006
1006
1007 fm.write('name', makeformatname(fv.name), fv.name,
1007 fm.write('name', makeformatname(fv.name), fv.name,
1008 label=namelabel)
1008 label=namelabel)
1009 fm.write('repo', ' %3s', formatvalue(repovalue),
1009 fm.write('repo', ' %3s', formatvalue(repovalue),
1010 label=repolabel)
1010 label=repolabel)
1011 if fv.default != configvalue:
1011 if fv.default != configvalue:
1012 configlabel = 'formatvariant.config.special'
1012 configlabel = 'formatvariant.config.special'
1013 else:
1013 else:
1014 configlabel = 'formatvariant.config.default'
1014 configlabel = 'formatvariant.config.default'
1015 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
1015 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
1016 label=configlabel)
1016 label=configlabel)
1017 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
1017 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
1018 label='formatvariant.default')
1018 label='formatvariant.default')
1019 fm.plain('\n')
1019 fm.plain('\n')
1020 fm.end()
1020 fm.end()
1021
1021
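Each row above picks its color label by comparing the repository's value against the configured value and the Mercurial default, with a config mismatch taking precedence over a default mismatch. A standalone sketch of that classification (label names shortened; not the upgrade module API):

def classify(repovalue, configvalue, default):
    # same precedence as above: a config mismatch outranks a default mismatch
    if repovalue != configvalue:
        return 'mismatchconfig'
    if repovalue != default:
        return 'mismatchdefault'
    return 'uptodate'

assert classify(True, False, False) == 'mismatchconfig'
assert classify(True, True, False) == 'mismatchdefault'
assert classify(False, False, False) == 'uptodate'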
1022 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
1022 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
1023 def debugfsinfo(ui, path="."):
1023 def debugfsinfo(ui, path="."):
1024 """show information detected about current filesystem"""
1024 """show information detected about current filesystem"""
1025 ui.write(('path: %s\n') % path)
1025 ui.write(('path: %s\n') % path)
1026 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
1026 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
1027 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
1027 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
1028 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
1028 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
1029 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
1029 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
1030 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
1030 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
1031 casesensitive = '(unknown)'
1031 casesensitive = '(unknown)'
1032 try:
1032 try:
1033 with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
1033 with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
1034 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
1034 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
1035 except OSError:
1035 except OSError:
1036 pass
1036 pass
1037 ui.write(('case-sensitive: %s\n') % casesensitive)
1037 ui.write(('case-sensitive: %s\n') % casesensitive)
1038
1038
1039 @command('debuggetbundle',
1039 @command('debuggetbundle',
1040 [('H', 'head', [], _('id of head node'), _('ID')),
1040 [('H', 'head', [], _('id of head node'), _('ID')),
1041 ('C', 'common', [], _('id of common node'), _('ID')),
1041 ('C', 'common', [], _('id of common node'), _('ID')),
1042 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
1042 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
1043 _('REPO FILE [-H|-C ID]...'),
1043 _('REPO FILE [-H|-C ID]...'),
1044 norepo=True)
1044 norepo=True)
1045 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1045 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1046 """retrieves a bundle from a repo
1046 """retrieves a bundle from a repo
1047
1047
1048 Every ID must be a full-length hex node id string. Saves the bundle to the
1048 Every ID must be a full-length hex node id string. Saves the bundle to the
1049 given file.
1049 given file.
1050 """
1050 """
1051 opts = pycompat.byteskwargs(opts)
1051 opts = pycompat.byteskwargs(opts)
1052 repo = hg.peer(ui, opts, repopath)
1052 repo = hg.peer(ui, opts, repopath)
1053 if not repo.capable('getbundle'):
1053 if not repo.capable('getbundle'):
1054 raise error.Abort("getbundle() not supported by target repository")
1054 raise error.Abort("getbundle() not supported by target repository")
1055 args = {}
1055 args = {}
1056 if common:
1056 if common:
1057 args[r'common'] = [bin(s) for s in common]
1057 args[r'common'] = [bin(s) for s in common]
1058 if head:
1058 if head:
1059 args[r'heads'] = [bin(s) for s in head]
1059 args[r'heads'] = [bin(s) for s in head]
1060 # TODO: get desired bundlecaps from command line.
1060 # TODO: get desired bundlecaps from command line.
1061 args[r'bundlecaps'] = None
1061 args[r'bundlecaps'] = None
1062 bundle = repo.getbundle('debug', **args)
1062 bundle = repo.getbundle('debug', **args)
1063
1063
1064 bundletype = opts.get('type', 'bzip2').lower()
1064 bundletype = opts.get('type', 'bzip2').lower()
1065 btypes = {'none': 'HG10UN',
1065 btypes = {'none': 'HG10UN',
1066 'bzip2': 'HG10BZ',
1066 'bzip2': 'HG10BZ',
1067 'gzip': 'HG10GZ',
1067 'gzip': 'HG10GZ',
1068 'bundle2': 'HG20'}
1068 'bundle2': 'HG20'}
1069 bundletype = btypes.get(bundletype)
1069 bundletype = btypes.get(bundletype)
1070 if bundletype not in bundle2.bundletypes:
1070 if bundletype not in bundle2.bundletypes:
1071 raise error.Abort(_('unknown bundle type specified with --type'))
1071 raise error.Abort(_('unknown bundle type specified with --type'))
1072 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1072 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1073
1073
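The --type option is lower-cased and mapped through a small table to an internal bundle identifier, and anything outside the table is rejected. A standalone sketch of that lookup, with the table copied from above and a generic exception standing in for error.Abort:

BTYPES = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ', 'bundle2': 'HG20'}

def bundletype(name):
    # normalize, then reject anything outside the table
    internal = BTYPES.get(name.lower())
    if internal is None:
        raise ValueError('unknown bundle type specified with --type')
    return internal

assert bundletype('Bzip2') == 'HG10BZ'
assert bundletype('bundle2') == 'HG20'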
1074 @command('debugignore', [], '[FILE]')
1074 @command('debugignore', [], '[FILE]')
1075 def debugignore(ui, repo, *files, **opts):
1075 def debugignore(ui, repo, *files, **opts):
1076 """display the combined ignore pattern and information about ignored files
1076 """display the combined ignore pattern and information about ignored files
1077
1077
1078 With no argument display the combined ignore pattern.
1078 With no argument display the combined ignore pattern.
1079
1079
1080 Given space-separated file names, show whether each file is ignored and,
1080 Given space-separated file names, show whether each file is ignored and,
1081 if so, the ignore rule (file and line number) that matched it.
1081 if so, the ignore rule (file and line number) that matched it.
1082 """
1082 """
1083 ignore = repo.dirstate._ignore
1083 ignore = repo.dirstate._ignore
1084 if not files:
1084 if not files:
1085 # Show all the patterns
1085 # Show all the patterns
1086 ui.write("%s\n" % pycompat.byterepr(ignore))
1086 ui.write("%s\n" % pycompat.byterepr(ignore))
1087 else:
1087 else:
1088 m = scmutil.match(repo[None], pats=files)
1088 m = scmutil.match(repo[None], pats=files)
1089 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1089 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1090 for f in m.files():
1090 for f in m.files():
1091 nf = util.normpath(f)
1091 nf = util.normpath(f)
1092 ignored = None
1092 ignored = None
1093 ignoredata = None
1093 ignoredata = None
1094 if nf != '.':
1094 if nf != '.':
1095 if ignore(nf):
1095 if ignore(nf):
1096 ignored = nf
1096 ignored = nf
1097 ignoredata = repo.dirstate._ignorefileandline(nf)
1097 ignoredata = repo.dirstate._ignorefileandline(nf)
1098 else:
1098 else:
1099 for p in util.finddirs(nf):
1099 for p in util.finddirs(nf):
1100 if ignore(p):
1100 if ignore(p):
1101 ignored = p
1101 ignored = p
1102 ignoredata = repo.dirstate._ignorefileandline(p)
1102 ignoredata = repo.dirstate._ignorefileandline(p)
1103 break
1103 break
1104 if ignored:
1104 if ignored:
1105 if ignored == nf:
1105 if ignored == nf:
1106 ui.write(_("%s is ignored\n") % uipathfn(f))
1106 ui.write(_("%s is ignored\n") % uipathfn(f))
1107 else:
1107 else:
1108 ui.write(_("%s is ignored because of "
1108 ui.write(_("%s is ignored because of "
1109 "containing folder %s\n")
1109 "containing folder %s\n")
1110 % (uipathfn(f), ignored))
1110 % (uipathfn(f), ignored))
1111 ignorefile, lineno, line = ignoredata
1111 ignorefile, lineno, line = ignoredata
1112 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1112 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1113 % (ignorefile, lineno, line))
1113 % (ignorefile, lineno, line))
1114 else:
1114 else:
1115 ui.write(_("%s is not ignored\n") % uipathfn(f))
1115 ui.write(_("%s is not ignored\n") % uipathfn(f))
1116
1116
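When a file itself does not match an ignore pattern, the loop above walks its containing directories, closest first, and reports the first one that is ignored. A standalone sketch of that walk; parent_dirs here is a simplified stand-in for util.finddirs:

import posixpath

def parent_dirs(path):
    # deepest containing directory first: 'a/b/c.txt' -> 'a/b', 'a'
    path = posixpath.dirname(path)
    while path:
        yield path
        path = posixpath.dirname(path)

def ignoring_folder(path, is_ignored):
    # return the closest ignored ancestor, or None if no folder matches
    for parent in parent_dirs(path):
        if is_ignored(parent):
            return parent
    return None

assert ignoring_folder('build/tmp/x.o', lambda p: p == 'build') == 'build'
assert ignoring_folder('src/main.py', lambda p: p == 'build') is None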
1117 @command('debugindex', cmdutil.debugrevlogopts + cmdutil.formatteropts,
1117 @command('debugindex', cmdutil.debugrevlogopts + cmdutil.formatteropts,
1118 _('-c|-m|FILE'))
1118 _('-c|-m|FILE'))
1119 def debugindex(ui, repo, file_=None, **opts):
1119 def debugindex(ui, repo, file_=None, **opts):
1120 """dump index data for a storage primitive"""
1120 """dump index data for a storage primitive"""
1121 opts = pycompat.byteskwargs(opts)
1121 opts = pycompat.byteskwargs(opts)
1122 store = cmdutil.openstorage(repo, 'debugindex', file_, opts)
1122 store = cmdutil.openstorage(repo, 'debugindex', file_, opts)
1123
1123
1124 if ui.debugflag:
1124 if ui.debugflag:
1125 shortfn = hex
1125 shortfn = hex
1126 else:
1126 else:
1127 shortfn = short
1127 shortfn = short
1128
1128
1129 idlen = 12
1129 idlen = 12
1130 for i in store:
1130 for i in store:
1131 idlen = len(shortfn(store.node(i)))
1131 idlen = len(shortfn(store.node(i)))
1132 break
1132 break
1133
1133
1134 fm = ui.formatter('debugindex', opts)
1134 fm = ui.formatter('debugindex', opts)
1135 fm.plain(b' rev linkrev %s %s p2\n' % (
1135 fm.plain(b' rev linkrev %s %s p2\n' % (
1136 b'nodeid'.ljust(idlen),
1136 b'nodeid'.ljust(idlen),
1137 b'p1'.ljust(idlen)))
1137 b'p1'.ljust(idlen)))
1138
1138
1139 for rev in store:
1139 for rev in store:
1140 node = store.node(rev)
1140 node = store.node(rev)
1141 parents = store.parents(node)
1141 parents = store.parents(node)
1142
1142
1143 fm.startitem()
1143 fm.startitem()
1144 fm.write(b'rev', b'%6d ', rev)
1144 fm.write(b'rev', b'%6d ', rev)
1145 fm.write(b'linkrev', '%7d ', store.linkrev(rev))
1145 fm.write(b'linkrev', '%7d ', store.linkrev(rev))
1146 fm.write(b'node', '%s ', shortfn(node))
1146 fm.write(b'node', '%s ', shortfn(node))
1147 fm.write(b'p1', '%s ', shortfn(parents[0]))
1147 fm.write(b'p1', '%s ', shortfn(parents[0]))
1148 fm.write(b'p2', '%s', shortfn(parents[1]))
1148 fm.write(b'p2', '%s', shortfn(parents[1]))
1149 fm.plain(b'\n')
1149 fm.plain(b'\n')
1150
1150
1151 fm.end()
1151 fm.end()
1152
1152
1153 @command('debugindexdot', cmdutil.debugrevlogopts,
1153 @command('debugindexdot', cmdutil.debugrevlogopts,
1154 _('-c|-m|FILE'), optionalrepo=True)
1154 _('-c|-m|FILE'), optionalrepo=True)
1155 def debugindexdot(ui, repo, file_=None, **opts):
1155 def debugindexdot(ui, repo, file_=None, **opts):
1156 """dump an index DAG as a graphviz dot file"""
1156 """dump an index DAG as a graphviz dot file"""
1157 opts = pycompat.byteskwargs(opts)
1157 opts = pycompat.byteskwargs(opts)
1158 r = cmdutil.openstorage(repo, 'debugindexdot', file_, opts)
1158 r = cmdutil.openstorage(repo, 'debugindexdot', file_, opts)
1159 ui.write(("digraph G {\n"))
1159 ui.write(("digraph G {\n"))
1160 for i in r:
1160 for i in r:
1161 node = r.node(i)
1161 node = r.node(i)
1162 pp = r.parents(node)
1162 pp = r.parents(node)
1163 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1163 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1164 if pp[1] != nullid:
1164 if pp[1] != nullid:
1165 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1165 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1166 ui.write("}\n")
1166 ui.write("}\n")
1167
1167
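The dot output is just one edge per parent link: the first-parent edge is always written (the null revision shows up as -1) and the second-parent edge only when it is not null. A standalone sketch over a plain parent table instead of a revlog:

NULLREV = -1

def todot(parents):
    # parents[rev] = (p1, p2) as revision numbers; -1 is the null revision
    lines = ['digraph G {']
    for rev, (p1, p2) in enumerate(parents):
        lines.append('\t%d -> %d' % (p1, rev))  # p1 edge is always emitted
        if p2 != NULLREV:                       # null second parents are skipped
            lines.append('\t%d -> %d' % (p2, rev))
    return '\n'.join(lines + ['}'])

print(todot([(-1, -1), (0, -1), (0, 1)]))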
1168 @command('debugindexstats', [])
1168 @command('debugindexstats', [])
1169 def debugindexstats(ui, repo):
1169 def debugindexstats(ui, repo):
1170 """show stats related to the changelog index"""
1170 """show stats related to the changelog index"""
1171 repo.changelog.shortest(nullid, 1)
1171 repo.changelog.shortest(nullid, 1)
1172 index = repo.changelog.index
1172 index = repo.changelog.index
1173 if not util.safehasattr(index, 'stats'):
1173 if not util.safehasattr(index, 'stats'):
1174 raise error.Abort(_('debugindexstats only works with native code'))
1174 raise error.Abort(_('debugindexstats only works with native code'))
1175 for k, v in sorted(index.stats().items()):
1175 for k, v in sorted(index.stats().items()):
1176 ui.write('%s: %d\n' % (k, v))
1176 ui.write('%s: %d\n' % (k, v))
1177
1177
1178 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1178 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1179 def debuginstall(ui, **opts):
1179 def debuginstall(ui, **opts):
1180 '''test Mercurial installation
1180 '''test Mercurial installation
1181
1181
1182 Returns 0 on success.
1182 Returns 0 on success.
1183 '''
1183 '''
1184 opts = pycompat.byteskwargs(opts)
1184 opts = pycompat.byteskwargs(opts)
1185
1185
1186 problems = 0
1186 problems = 0
1187
1187
1188 fm = ui.formatter('debuginstall', opts)
1188 fm = ui.formatter('debuginstall', opts)
1189 fm.startitem()
1189 fm.startitem()
1190
1190
1191 # encoding
1191 # encoding
1192 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1192 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1193 err = None
1193 err = None
1194 try:
1194 try:
1195 codecs.lookup(pycompat.sysstr(encoding.encoding))
1195 codecs.lookup(pycompat.sysstr(encoding.encoding))
1196 except LookupError as inst:
1196 except LookupError as inst:
1197 err = stringutil.forcebytestr(inst)
1197 err = stringutil.forcebytestr(inst)
1198 problems += 1
1198 problems += 1
1199 fm.condwrite(err, 'encodingerror', _(" %s\n"
1199 fm.condwrite(err, 'encodingerror', _(" %s\n"
1200 " (check that your locale is properly set)\n"), err)
1200 " (check that your locale is properly set)\n"), err)
1201
1201
1202 # Python
1202 # Python
1203 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1203 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1204 pycompat.sysexecutable)
1204 pycompat.sysexecutable)
1205 fm.write('pythonver', _("checking Python version (%s)\n"),
1205 fm.write('pythonver', _("checking Python version (%s)\n"),
1206 ("%d.%d.%d" % sys.version_info[:3]))
1206 ("%d.%d.%d" % sys.version_info[:3]))
1207 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1207 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1208 os.path.dirname(pycompat.fsencode(os.__file__)))
1208 os.path.dirname(pycompat.fsencode(os.__file__)))
1209
1209
1210 security = set(sslutil.supportedprotocols)
1210 security = set(sslutil.supportedprotocols)
1211 if sslutil.hassni:
1211 if sslutil.hassni:
1212 security.add('sni')
1212 security.add('sni')
1213
1213
1214 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1214 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1215 fm.formatlist(sorted(security), name='protocol',
1215 fm.formatlist(sorted(security), name='protocol',
1216 fmt='%s', sep=','))
1216 fmt='%s', sep=','))
1217
1217
1218 # These are warnings, not errors. So don't increment problem count. This
1218 # These are warnings, not errors. So don't increment problem count. This
1219 # may change in the future.
1219 # may change in the future.
1220 if 'tls1.2' not in security:
1220 if 'tls1.2' not in security:
1221 fm.plain(_(' TLS 1.2 not supported by Python install; '
1221 fm.plain(_(' TLS 1.2 not supported by Python install; '
1222 'network connections lack modern security\n'))
1222 'network connections lack modern security\n'))
1223 if 'sni' not in security:
1223 if 'sni' not in security:
1224 fm.plain(_(' SNI not supported by Python install; may have '
1224 fm.plain(_(' SNI not supported by Python install; may have '
1225 'connectivity issues with some servers\n'))
1225 'connectivity issues with some servers\n'))
1226
1226
1227 # TODO print CA cert info
1227 # TODO print CA cert info
1228
1228
1229 # hg version
1229 # hg version
1230 hgver = util.version()
1230 hgver = util.version()
1231 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1231 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1232 hgver.split('+')[0])
1232 hgver.split('+')[0])
1233 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1233 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1234 '+'.join(hgver.split('+')[1:]))
1234 '+'.join(hgver.split('+')[1:]))
1235
1235
1236 # compiled modules
1236 # compiled modules
1237 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1237 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1238 policy.policy)
1238 policy.policy)
1239 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1239 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1240 os.path.dirname(pycompat.fsencode(__file__)))
1240 os.path.dirname(pycompat.fsencode(__file__)))
1241
1241
1242 if policy.policy in ('c', 'allow'):
1242 if policy.policy in ('c', 'allow'):
1243 err = None
1243 err = None
1244 try:
1244 try:
1245 from .cext import (
1245 from .cext import (
1246 base85,
1246 base85,
1247 bdiff,
1247 bdiff,
1248 mpatch,
1248 mpatch,
1249 osutil,
1249 osutil,
1250 )
1250 )
1251 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1251 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1252 except Exception as inst:
1252 except Exception as inst:
1253 err = stringutil.forcebytestr(inst)
1253 err = stringutil.forcebytestr(inst)
1254 problems += 1
1254 problems += 1
1255 fm.condwrite(err, 'extensionserror', " %s\n", err)
1255 fm.condwrite(err, 'extensionserror', " %s\n", err)
1256
1256
1257 compengines = util.compengines._engines.values()
1257 compengines = util.compengines._engines.values()
1258 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1258 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1259 fm.formatlist(sorted(e.name() for e in compengines),
1259 fm.formatlist(sorted(e.name() for e in compengines),
1260 name='compengine', fmt='%s', sep=', '))
1260 name='compengine', fmt='%s', sep=', '))
1261 fm.write('compenginesavail', _('checking available compression engines '
1261 fm.write('compenginesavail', _('checking available compression engines '
1262 '(%s)\n'),
1262 '(%s)\n'),
1263 fm.formatlist(sorted(e.name() for e in compengines
1263 fm.formatlist(sorted(e.name() for e in compengines
1264 if e.available()),
1264 if e.available()),
1265 name='compengine', fmt='%s', sep=', '))
1265 name='compengine', fmt='%s', sep=', '))
1266 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1266 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1267 fm.write('compenginesserver', _('checking available compression engines '
1267 fm.write('compenginesserver', _('checking available compression engines '
1268 'for wire protocol (%s)\n'),
1268 'for wire protocol (%s)\n'),
1269 fm.formatlist([e.name() for e in wirecompengines
1269 fm.formatlist([e.name() for e in wirecompengines
1270 if e.wireprotosupport()],
1270 if e.wireprotosupport()],
1271 name='compengine', fmt='%s', sep=', '))
1271 name='compengine', fmt='%s', sep=', '))
1272 re2 = 'missing'
1272 re2 = 'missing'
1273 if util._re2:
1273 if util._re2:
1274 re2 = 'available'
1274 re2 = 'available'
1275 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1275 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1276 fm.data(re2=bool(util._re2))
1276 fm.data(re2=bool(util._re2))
1277
1277
1278 # templates
1278 # templates
1279 p = templater.templatepaths()
1279 p = templater.templatepaths()
1280 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1280 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1281 fm.condwrite(not p, '', _(" no template directories found\n"))
1281 fm.condwrite(not p, '', _(" no template directories found\n"))
1282 if p:
1282 if p:
1283 m = templater.templatepath("map-cmdline.default")
1283 m = templater.templatepath("map-cmdline.default")
1284 if m:
1284 if m:
1285 # template found, check if it is working
1285 # template found, check if it is working
1286 err = None
1286 err = None
1287 try:
1287 try:
1288 templater.templater.frommapfile(m)
1288 templater.templater.frommapfile(m)
1289 except Exception as inst:
1289 except Exception as inst:
1290 err = stringutil.forcebytestr(inst)
1290 err = stringutil.forcebytestr(inst)
1291 p = None
1291 p = None
1292 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1292 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1293 else:
1293 else:
1294 p = None
1294 p = None
1295 fm.condwrite(p, 'defaulttemplate',
1295 fm.condwrite(p, 'defaulttemplate',
1296 _("checking default template (%s)\n"), m)
1296 _("checking default template (%s)\n"), m)
1297 fm.condwrite(not m, 'defaulttemplatenotfound',
1297 fm.condwrite(not m, 'defaulttemplatenotfound',
1298 _(" template '%s' not found\n"), "default")
1298 _(" template '%s' not found\n"), "default")
1299 if not p:
1299 if not p:
1300 problems += 1
1300 problems += 1
1301 fm.condwrite(not p, '',
1301 fm.condwrite(not p, '',
1302 _(" (templates seem to have been installed incorrectly)\n"))
1302 _(" (templates seem to have been installed incorrectly)\n"))
1303
1303
1304 # editor
1304 # editor
1305 editor = ui.geteditor()
1305 editor = ui.geteditor()
1306 editor = util.expandpath(editor)
1306 editor = util.expandpath(editor)
1307 editorbin = procutil.shellsplit(editor)[0]
1307 editorbin = procutil.shellsplit(editor)[0]
1308 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1308 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1309 cmdpath = procutil.findexe(editorbin)
1309 cmdpath = procutil.findexe(editorbin)
1310 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1310 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1311 _(" No commit editor set and can't find %s in PATH\n"
1311 _(" No commit editor set and can't find %s in PATH\n"
1312 " (specify a commit editor in your configuration"
1312 " (specify a commit editor in your configuration"
1313 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1313 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1314 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1314 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1315 _(" Can't find editor '%s' in PATH\n"
1315 _(" Can't find editor '%s' in PATH\n"
1316 " (specify a commit editor in your configuration"
1316 " (specify a commit editor in your configuration"
1317 " file)\n"), not cmdpath and editorbin)
1317 " file)\n"), not cmdpath and editorbin)
1318 if not cmdpath and editor != 'vi':
1318 if not cmdpath and editor != 'vi':
1319 problems += 1
1319 problems += 1
1320
1320
1321 # check username
1321 # check username
1322 username = None
1322 username = None
1323 err = None
1323 err = None
1324 try:
1324 try:
1325 username = ui.username()
1325 username = ui.username()
1326 except error.Abort as e:
1326 except error.Abort as e:
1327 err = stringutil.forcebytestr(e)
1327 err = stringutil.forcebytestr(e)
1328 problems += 1
1328 problems += 1
1329
1329
1330 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1330 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1331 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1331 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1332 " (specify a username in your configuration file)\n"), err)
1332 " (specify a username in your configuration file)\n"), err)
1333
1333
1334 fm.condwrite(not problems, '',
1334 fm.condwrite(not problems, '',
1335 _("no problems detected\n"))
1335 _("no problems detected\n"))
1336 if not problems:
1336 if not problems:
1337 fm.data(problems=problems)
1337 fm.data(problems=problems)
1338 fm.condwrite(problems, 'problems',
1338 fm.condwrite(problems, 'problems',
1339 _("%d problems detected,"
1339 _("%d problems detected,"
1340 " please check your install!\n"), problems)
1340 " please check your install!\n"), problems)
1341 fm.end()
1341 fm.end()
1342
1342
1343 return problems
1343 return problems
1344
1344
1345 @command('debugknown', [], _('REPO ID...'), norepo=True)
1345 @command('debugknown', [], _('REPO ID...'), norepo=True)
1346 def debugknown(ui, repopath, *ids, **opts):
1346 def debugknown(ui, repopath, *ids, **opts):
1347 """test whether node ids are known to a repo
1347 """test whether node ids are known to a repo
1348
1348
1349 Every ID must be a full-length hex node id string. Returns a list of 0s
1349 Every ID must be a full-length hex node id string. Returns a list of 0s
1350 and 1s indicating unknown/known.
1350 and 1s indicating unknown/known.
1351 """
1351 """
1352 opts = pycompat.byteskwargs(opts)
1352 opts = pycompat.byteskwargs(opts)
1353 repo = hg.peer(ui, opts, repopath)
1353 repo = hg.peer(ui, opts, repopath)
1354 if not repo.capable('known'):
1354 if not repo.capable('known'):
1355 raise error.Abort("known() not supported by target repository")
1355 raise error.Abort("known() not supported by target repository")
1356 flags = repo.known([bin(s) for s in ids])
1356 flags = repo.known([bin(s) for s in ids])
1357 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1357 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1358
1358
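The reply is printed as a compact string of 0s and 1s, one character per queried node, as the docstring above says. A trivial standalone sketch of that encoding:

def encode_known(flags):
    # e.g. [True, False, True] -> '101', matching the 0/1 reply format
    return ''.join('1' if f else '0' for f in flags)

assert encode_known([True, False, True]) == '101'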
1359 @command('debuglabelcomplete', [], _('LABEL...'))
1359 @command('debuglabelcomplete', [], _('LABEL...'))
1360 def debuglabelcomplete(ui, repo, *args):
1360 def debuglabelcomplete(ui, repo, *args):
1361 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1361 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1362 debugnamecomplete(ui, repo, *args)
1362 debugnamecomplete(ui, repo, *args)
1363
1363
1364 @command('debuglocks',
1364 @command('debuglocks',
1365 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1365 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1366 ('W', 'force-wlock', None,
1366 ('W', 'force-wlock', None,
1367 _('free the working state lock (DANGEROUS)')),
1367 _('free the working state lock (DANGEROUS)')),
1368 ('s', 'set-lock', None, _('set the store lock until stopped')),
1368 ('s', 'set-lock', None, _('set the store lock until stopped')),
1369 ('S', 'set-wlock', None,
1369 ('S', 'set-wlock', None,
1370 _('set the working state lock until stopped'))],
1370 _('set the working state lock until stopped'))],
1371 _('[OPTION]...'))
1371 _('[OPTION]...'))
1372 def debuglocks(ui, repo, **opts):
1372 def debuglocks(ui, repo, **opts):
1373 """show or modify state of locks
1373 """show or modify state of locks
1374
1374
1375 By default, this command will show which locks are held. This
1375 By default, this command will show which locks are held. This
1376 includes the user and process holding the lock, the amount of time
1376 includes the user and process holding the lock, the amount of time
1377 the lock has been held, and the machine name where the process is
1377 the lock has been held, and the machine name where the process is
1378 running if it's not local.
1378 running if it's not local.
1379
1379
1380 Locks protect the integrity of Mercurial's data, so should be
1380 Locks protect the integrity of Mercurial's data, so should be
1381 treated with care. System crashes or other interruptions may cause
1381 treated with care. System crashes or other interruptions may cause
1382 locks to not be properly released, though Mercurial will usually
1382 locks to not be properly released, though Mercurial will usually
1383 detect and remove such stale locks automatically.
1383 detect and remove such stale locks automatically.
1384
1384
1385 However, detecting stale locks may not always be possible (for
1385 However, detecting stale locks may not always be possible (for
1386 instance, on a shared filesystem). Removing locks may also be
1386 instance, on a shared filesystem). Removing locks may also be
1387 blocked by filesystem permissions.
1387 blocked by filesystem permissions.
1388
1388
1389 Setting a lock will prevent other commands from changing the data.
1389 Setting a lock will prevent other commands from changing the data.
1390 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1390 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1391 The set locks are removed when the command exits.
1391 The set locks are removed when the command exits.
1392
1392
1393 Returns 0 if no locks are held.
1393 Returns 0 if no locks are held.
1394
1394
1395 """
1395 """
1396
1396
1397 if opts.get(r'force_lock'):
1397 if opts.get(r'force_lock'):
1398 repo.svfs.unlink('lock')
1398 repo.svfs.unlink('lock')
1399 if opts.get(r'force_wlock'):
1399 if opts.get(r'force_wlock'):
1400 repo.vfs.unlink('wlock')
1400 repo.vfs.unlink('wlock')
1401 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1401 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1402 return 0
1402 return 0
1403
1403
1404 locks = []
1404 locks = []
1405 try:
1405 try:
1406 if opts.get(r'set_wlock'):
1406 if opts.get(r'set_wlock'):
1407 try:
1407 try:
1408 locks.append(repo.wlock(False))
1408 locks.append(repo.wlock(False))
1409 except error.LockHeld:
1409 except error.LockHeld:
1410 raise error.Abort(_('wlock is already held'))
1410 raise error.Abort(_('wlock is already held'))
1411 if opts.get(r'set_lock'):
1411 if opts.get(r'set_lock'):
1412 try:
1412 try:
1413 locks.append(repo.lock(False))
1413 locks.append(repo.lock(False))
1414 except error.LockHeld:
1414 except error.LockHeld:
1415 raise error.Abort(_('lock is already held'))
1415 raise error.Abort(_('lock is already held'))
1416 if len(locks):
1416 if len(locks):
1417 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1417 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1418 return 0
1418 return 0
1419 finally:
1419 finally:
1420 release(*locks)
1420 release(*locks)
1421
1421
1422 now = time.time()
1422 now = time.time()
1423 held = 0
1423 held = 0
1424
1424
1425 def report(vfs, name, method):
1425 def report(vfs, name, method):
1426 # this causes stale locks to get reaped for more accurate reporting
1426 # this causes stale locks to get reaped for more accurate reporting
1427 try:
1427 try:
1428 l = method(False)
1428 l = method(False)
1429 except error.LockHeld:
1429 except error.LockHeld:
1430 l = None
1430 l = None
1431
1431
1432 if l:
1432 if l:
1433 l.release()
1433 l.release()
1434 else:
1434 else:
1435 try:
1435 try:
1436 st = vfs.lstat(name)
1436 st = vfs.lstat(name)
1437 age = now - st[stat.ST_MTIME]
1437 age = now - st[stat.ST_MTIME]
1438 user = util.username(st.st_uid)
1438 user = util.username(st.st_uid)
1439 locker = vfs.readlock(name)
1439 locker = vfs.readlock(name)
1440 if ":" in locker:
1440 if ":" in locker:
1441 host, pid = locker.split(':')
1441 host, pid = locker.split(':')
1442 if host == socket.gethostname():
1442 if host == socket.gethostname():
1443 locker = 'user %s, process %s' % (user or b'None', pid)
1443 locker = 'user %s, process %s' % (user or b'None', pid)
1444 else:
1444 else:
1445 locker = ('user %s, process %s, host %s'
1445 locker = ('user %s, process %s, host %s'
1446 % (user or b'None', pid, host))
1446 % (user or b'None', pid, host))
1447 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1447 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1448 return 1
1448 return 1
1449 except OSError as e:
1449 except OSError as e:
1450 if e.errno != errno.ENOENT:
1450 if e.errno != errno.ENOENT:
1451 raise
1451 raise
1452
1452
1453 ui.write(("%-6s free\n") % (name + ":"))
1453 ui.write(("%-6s free\n") % (name + ":"))
1454 return 0
1454 return 0
1455
1455
1456 held += report(repo.svfs, "lock", repo.lock)
1456 held += report(repo.svfs, "lock", repo.lock)
1457 held += report(repo.vfs, "wlock", repo.wlock)
1457 held += report(repo.vfs, "wlock", repo.wlock)
1458
1458
1459 return held
1459 return held
1460
1460
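When a lock file exists but cannot be reacquired, the report above decodes the "host:pid" locker string and derives the lock's age from the lock file's mtime. A standalone sketch of that formatting, with plain values in place of the vfs and stat objects:

import socket
import time

def describe_lock(locker, mtime, user=None, now=None):
    # render "user X, process Y[, host Z] (AGEs)" the way the report does
    now = time.time() if now is None else now
    age = int(now - mtime)
    if ':' in locker:
        host, pid = locker.split(':', 1)
        if host == socket.gethostname():
            who = 'user %s, process %s' % (user or 'None', pid)
        else:
            who = 'user %s, process %s, host %s' % (user or 'None', pid, host)
    else:
        who = locker
    return '%s (%ds)' % (who, age)

print(describe_lock('otherbox:1234', time.time() - 42, user='alice'))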
1461 @command('debugmanifestfulltextcache', [
1461 @command('debugmanifestfulltextcache', [
1462 ('', 'clear', False, _('clear the cache')),
1462 ('', 'clear', False, _('clear the cache')),
1463 ('a', 'add', '', _('add the given manifest node to the cache'),
1463 ('a', 'add', [], _('add the given manifest nodes to the cache'),
1464 _('NODE'))
1464 _('NODE'))
1465 ], '')
1465 ], '')
1466 def debugmanifestfulltextcache(ui, repo, add=None, **opts):
1466 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
1467 """show, clear or amend the contents of the manifest fulltext cache"""
1467 """show, clear or amend the contents of the manifest fulltext cache"""
1468
1468
1469 def getcache():
1469 def getcache():
1470 r = repo.manifestlog.getstorage(b'')
1470 r = repo.manifestlog.getstorage(b'')
1471 try:
1471 try:
1472 return r._fulltextcache
1472 return r._fulltextcache
1473 except AttributeError:
1473 except AttributeError:
1474 msg = _("Current revlog implementation doesn't appear to have a "
1474 msg = _("Current revlog implementation doesn't appear to have a "
1475 "manifest fulltext cache\n")
1475 "manifest fulltext cache\n")
1476 raise error.Abort(msg)
1476 raise error.Abort(msg)
1477
1477
1478 if opts.get(r'clear'):
1478 if opts.get(r'clear'):
1479 with repo.lock():
1479 with repo.lock():
1480 cache = getcache()
1480 cache = getcache()
1481 cache.clear(clear_persisted_data=True)
1481 cache.clear(clear_persisted_data=True)
1482 return
1482 return
1483
1483
1484 if add:
1484 if add:
1485 with repo.lock():
1485 with repo.lock():
1486 try:
1486 m = repo.manifestlog
1487 m = repo.manifestlog
1487 store = m.getstorage(b'')
1488 manifest = m[m.getstorage(b'').lookup(add)]
1488 for n in add:
1489 except error.LookupError as e:
1489 try:
1490 raise error.Abort(e, hint="Check your manifest node id")
1490 manifest = m[store.lookup(n)]
1491 manifest.read() # stores revision in cache too
1491 except error.LookupError as e:
1492 raise error.Abort(e, hint="Check your manifest node id")
1493 manifest.read() # stores revision in cache too
1492 return
1494 return
1493
1495
1494 cache = getcache()
1496 cache = getcache()
1495 if not len(cache):
1497 if not len(cache):
1496 ui.write(_('cache empty\n'))
1498 ui.write(_('cache empty\n'))
1497 else:
1499 else:
1498 ui.write(
1500 ui.write(
1499 _('cache contains %d manifest entries, in order of most to '
1501 _('cache contains %d manifest entries, in order of most to '
1500 'least recent:\n') % (len(cache),))
1502 'least recent:\n') % (len(cache),))
1501 totalsize = 0
1503 totalsize = 0
1502 for nodeid in cache:
1504 for nodeid in cache:
1503 # Use cache.get to not update the LRU order
1505 # Use cache.get to not update the LRU order
1504 data = cache.get(nodeid)
1506 data = cache.get(nodeid)
1505 size = len(data)
1507 size = len(data)
1506 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1508 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1507 ui.write(_('id: %s, size %s\n') % (
1509 ui.write(_('id: %s, size %s\n') % (
1508 hex(nodeid), util.bytecount(size)))
1510 hex(nodeid), util.bytecount(size)))
1509 ondisk = cache._opener.stat('manifestfulltextcache').st_size
1511 ondisk = cache._opener.stat('manifestfulltextcache').st_size
1510 ui.write(
1512 ui.write(
1511 _('total cache data size %s, on-disk %s\n') % (
1513 _('total cache data size %s, on-disk %s\n') % (
1512 util.bytecount(totalsize), util.bytecount(ondisk))
1514 util.bytecount(totalsize), util.bytecount(ondisk))
1513 )
1515 )
1514
1516
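The change above turns --add into a list option, so repeated -a/--add flags accumulate and every requested manifest node is resolved and read inside a single repository lock; for example, hg debugmanifestfulltextcache -a NODE1 -a NODE2 now warms both entries in one run. A standalone sketch of the accumulate-then-loop pattern, with argparse standing in for Mercurial's option parser and lookup/read as illustrative callbacks:

import argparse

def warm_cache(nodes, lookup, read):
    # resolve and read every requested node; abort on the first bad id
    for n in nodes:
        try:
            rev = lookup(n)
        except KeyError as e:
            raise SystemExit('abort: %s (check your manifest node id)' % e)
        read(rev)  # reading is what populates the fulltext cache

parser = argparse.ArgumentParser()
parser.add_argument('-a', '--add', action='append', default=[], metavar='NODE')
args = parser.parse_args(['-a', 'ab12cd', '-a', 'ef34ab'])
warm_cache(args.add, lookup=lambda n: n, read=lambda rev: None)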
1515 @command('debugmergestate', [], '')
1517 @command('debugmergestate', [], '')
1516 def debugmergestate(ui, repo, *args):
1518 def debugmergestate(ui, repo, *args):
1517 """print merge state
1519 """print merge state
1518
1520
1519 Use --verbose to print out information about whether v1 or v2 merge state
1521 Use --verbose to print out information about whether v1 or v2 merge state
1520 was chosen."""
1522 was chosen."""
1521 def _hashornull(h):
1523 def _hashornull(h):
1522 if h == nullhex:
1524 if h == nullhex:
1523 return 'null'
1525 return 'null'
1524 else:
1526 else:
1525 return h
1527 return h
1526
1528
1527 def printrecords(version):
1529 def printrecords(version):
1528 ui.write(('* version %d records\n') % version)
1530 ui.write(('* version %d records\n') % version)
1529 if version == 1:
1531 if version == 1:
1530 records = v1records
1532 records = v1records
1531 else:
1533 else:
1532 records = v2records
1534 records = v2records
1533
1535
1534 for rtype, record in records:
1536 for rtype, record in records:
1535 # pretty print some record types
1537 # pretty print some record types
1536 if rtype == 'L':
1538 if rtype == 'L':
1537 ui.write(('local: %s\n') % record)
1539 ui.write(('local: %s\n') % record)
1538 elif rtype == 'O':
1540 elif rtype == 'O':
1539 ui.write(('other: %s\n') % record)
1541 ui.write(('other: %s\n') % record)
1540 elif rtype == 'm':
1542 elif rtype == 'm':
1541 driver, mdstate = record.split('\0', 1)
1543 driver, mdstate = record.split('\0', 1)
1542 ui.write(('merge driver: %s (state "%s")\n')
1544 ui.write(('merge driver: %s (state "%s")\n')
1543 % (driver, mdstate))
1545 % (driver, mdstate))
1544 elif rtype in 'FDC':
1546 elif rtype in 'FDC':
1545 r = record.split('\0')
1547 r = record.split('\0')
1546 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1548 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1547 if version == 1:
1549 if version == 1:
1548 onode = 'not stored in v1 format'
1550 onode = 'not stored in v1 format'
1549 flags = r[7]
1551 flags = r[7]
1550 else:
1552 else:
1551 onode, flags = r[7:9]
1553 onode, flags = r[7:9]
1552 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1554 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1553 % (f, rtype, state, _hashornull(hash)))
1555 % (f, rtype, state, _hashornull(hash)))
1554 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1556 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1555 ui.write((' ancestor path: %s (node %s)\n')
1557 ui.write((' ancestor path: %s (node %s)\n')
1556 % (afile, _hashornull(anode)))
1558 % (afile, _hashornull(anode)))
1557 ui.write((' other path: %s (node %s)\n')
1559 ui.write((' other path: %s (node %s)\n')
1558 % (ofile, _hashornull(onode)))
1560 % (ofile, _hashornull(onode)))
1559 elif rtype == 'f':
1561 elif rtype == 'f':
1560 filename, rawextras = record.split('\0', 1)
1562 filename, rawextras = record.split('\0', 1)
1561 extras = rawextras.split('\0')
1563 extras = rawextras.split('\0')
1562 i = 0
1564 i = 0
1563 extrastrings = []
1565 extrastrings = []
1564 while i < len(extras):
1566 while i < len(extras):
1565 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1567 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1566 i += 2
1568 i += 2
1567
1569
1568 ui.write(('file extras: %s (%s)\n')
1570 ui.write(('file extras: %s (%s)\n')
1569 % (filename, ', '.join(extrastrings)))
1571 % (filename, ', '.join(extrastrings)))
1570 elif rtype == 'l':
1572 elif rtype == 'l':
1571 labels = record.split('\0', 2)
1573 labels = record.split('\0', 2)
1572 labels = [l for l in labels if len(l) > 0]
1574 labels = [l for l in labels if len(l) > 0]
1573 ui.write(('labels:\n'))
1575 ui.write(('labels:\n'))
1574 ui.write((' local: %s\n' % labels[0]))
1576 ui.write((' local: %s\n' % labels[0]))
1575 ui.write((' other: %s\n' % labels[1]))
1577 ui.write((' other: %s\n' % labels[1]))
1576 if len(labels) > 2:
1578 if len(labels) > 2:
1577 ui.write((' base: %s\n' % labels[2]))
1579 ui.write((' base: %s\n' % labels[2]))
1578 else:
1580 else:
1579 ui.write(('unrecognized entry: %s\t%s\n')
1581 ui.write(('unrecognized entry: %s\t%s\n')
1580 % (rtype, record.replace('\0', '\t')))
1582 % (rtype, record.replace('\0', '\t')))
1581
1583
1582 # Avoid mergestate.read() since it may raise an exception for unsupported
1584 # Avoid mergestate.read() since it may raise an exception for unsupported
1583 # merge state records. We shouldn't be doing this, but this is OK since this
1585 # merge state records. We shouldn't be doing this, but this is OK since this
1584 # command is pretty low-level.
1586 # command is pretty low-level.
1585 ms = mergemod.mergestate(repo)
1587 ms = mergemod.mergestate(repo)
1586
1588
1587 # sort so that reasonable information is on top
1589 # sort so that reasonable information is on top
1588 v1records = ms._readrecordsv1()
1590 v1records = ms._readrecordsv1()
1589 v2records = ms._readrecordsv2()
1591 v2records = ms._readrecordsv2()
1590 order = 'LOml'
1592 order = 'LOml'
1591 def key(r):
1593 def key(r):
1592 idx = order.find(r[0])
1594 idx = order.find(r[0])
1593 if idx == -1:
1595 if idx == -1:
1594 return (1, r[1])
1596 return (1, r[1])
1595 else:
1597 else:
1596 return (0, idx)
1598 return (0, idx)
1597 v1records.sort(key=key)
1599 v1records.sort(key=key)
1598 v2records.sort(key=key)
1600 v2records.sort(key=key)
1599
1601
1600 if not v1records and not v2records:
1602 if not v1records and not v2records:
1601 ui.write(('no merge state found\n'))
1603 ui.write(('no merge state found\n'))
1602 elif not v2records:
1604 elif not v2records:
1603 ui.note(('no version 2 merge state\n'))
1605 ui.note(('no version 2 merge state\n'))
1604 printrecords(1)
1606 printrecords(1)
1605 elif ms._v1v2match(v1records, v2records):
1607 elif ms._v1v2match(v1records, v2records):
1606 ui.note(('v1 and v2 states match: using v2\n'))
1608 ui.note(('v1 and v2 states match: using v2\n'))
1607 printrecords(2)
1609 printrecords(2)
1608 else:
1610 else:
1609 ui.note(('v1 and v2 states mismatch: using v1\n'))
1611 ui.note(('v1 and v2 states mismatch: using v1\n'))
1610 printrecords(1)
1612 printrecords(1)
1611 if ui.verbose:
1613 if ui.verbose:
1612 printrecords(2)
1614 printrecords(2)
1613
1615
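Records are sorted so the header-like types come first in the fixed 'LOml' order and any other record type falls back to sorting by its payload. A standalone sketch of that key function:

ORDER = 'LOml'

def record_key(record):
    rtype, payload = record
    idx = ORDER.find(rtype)
    # known record types keep the fixed 'LOml' order; everything else
    # sorts after them by payload
    return (1, payload) if idx == -1 else (0, idx)

records = [('F', 'a.txt\x00u\x00...'), ('O', 'deadbeef'), ('L', 'cafebabe')]
assert [r[0] for r in sorted(records, key=record_key)] == ['L', 'O', 'F']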
1614 @command('debugnamecomplete', [], _('NAME...'))
1616 @command('debugnamecomplete', [], _('NAME...'))
1615 def debugnamecomplete(ui, repo, *args):
1617 def debugnamecomplete(ui, repo, *args):
1616 '''complete "names" - tags, open branch names, bookmark names'''
1618 '''complete "names" - tags, open branch names, bookmark names'''
1617
1619
1618 names = set()
1620 names = set()
1619 # since we previously only listed open branches, we will handle that
1621 # since we previously only listed open branches, we will handle that
1620 # specially (after this for loop)
1622 # specially (after this for loop)
1621 for name, ns in repo.names.iteritems():
1623 for name, ns in repo.names.iteritems():
1622 if name != 'branches':
1624 if name != 'branches':
1623 names.update(ns.listnames(repo))
1625 names.update(ns.listnames(repo))
1624 names.update(tag for (tag, heads, tip, closed)
1626 names.update(tag for (tag, heads, tip, closed)
1625 in repo.branchmap().iterbranches() if not closed)
1627 in repo.branchmap().iterbranches() if not closed)
1626 completions = set()
1628 completions = set()
1627 if not args:
1629 if not args:
1628 args = ['']
1630 args = ['']
1629 for a in args:
1631 for a in args:
1630 completions.update(n for n in names if n.startswith(a))
1632 completions.update(n for n in names if n.startswith(a))
1631 ui.write('\n'.join(sorted(completions)))
1633 ui.write('\n'.join(sorted(completions)))
1632 ui.write('\n')
1634 ui.write('\n')
1633
1635
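Completion is a plain prefix match over the union of tag, bookmark and open-branch names, and an empty argument list matches everything. A standalone sketch:

def complete(names, args):
    # an empty argument list completes everything, like the command above
    prefixes = args or ['']
    return sorted(n for n in names if any(n.startswith(p) for p in prefixes))

names = {'default', 'stable', 'tip', 'feature-x'}
assert complete(names, ['s', 'ti']) == ['stable', 'tip']
assert complete(names, []) == ['default', 'feature-x', 'stable', 'tip']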
1634 @command('debugobsolete',
1636 @command('debugobsolete',
1635 [('', 'flags', 0, _('markers flag')),
1637 [('', 'flags', 0, _('markers flag')),
1636 ('', 'record-parents', False,
1638 ('', 'record-parents', False,
1637 _('record parent information for the precursor')),
1639 _('record parent information for the precursor')),
1638 ('r', 'rev', [], _('display markers relevant to REV')),
1640 ('r', 'rev', [], _('display markers relevant to REV')),
1639 ('', 'exclusive', False, _('restrict display to markers only '
1641 ('', 'exclusive', False, _('restrict display to markers only '
1640 'relevant to REV')),
1642 'relevant to REV')),
1641 ('', 'index', False, _('display index of the marker')),
1643 ('', 'index', False, _('display index of the marker')),
1642 ('', 'delete', [], _('delete markers specified by indices')),
1644 ('', 'delete', [], _('delete markers specified by indices')),
1643 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1645 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1644 _('[OBSOLETED [REPLACEMENT ...]]'))
1646 _('[OBSOLETED [REPLACEMENT ...]]'))
1645 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1647 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1646 """create arbitrary obsolete marker
1648 """create arbitrary obsolete marker
1647
1649
1648 With no arguments, displays the list of obsolescence markers."""
1650 With no arguments, displays the list of obsolescence markers."""
1649
1651
1650 opts = pycompat.byteskwargs(opts)
1652 opts = pycompat.byteskwargs(opts)
1651
1653
1652 def parsenodeid(s):
1654 def parsenodeid(s):
1653 try:
1655 try:
1654 # We do not use revsingle/revrange functions here to accept
1656 # We do not use revsingle/revrange functions here to accept
1655 # arbitrary node identifiers, possibly not present in the
1657 # arbitrary node identifiers, possibly not present in the
1656 # local repository.
1658 # local repository.
1657 n = bin(s)
1659 n = bin(s)
1658 if len(n) != len(nullid):
1660 if len(n) != len(nullid):
1659 raise TypeError()
1661 raise TypeError()
1660 return n
1662 return n
1661 except TypeError:
1663 except TypeError:
1662 raise error.Abort('changeset references must be full hexadecimal '
1664 raise error.Abort('changeset references must be full hexadecimal '
1663 'node identifiers')
1665 'node identifiers')
1664
1666
1665 if opts.get('delete'):
1667 if opts.get('delete'):
1666 indices = []
1668 indices = []
1667 for v in opts.get('delete'):
1669 for v in opts.get('delete'):
1668 try:
1670 try:
1669 indices.append(int(v))
1671 indices.append(int(v))
1670 except ValueError:
1672 except ValueError:
1671 raise error.Abort(_('invalid index value: %r') % v,
1673 raise error.Abort(_('invalid index value: %r') % v,
1672 hint=_('use integers for indices'))
1674 hint=_('use integers for indices'))
1673
1675
1674 if repo.currenttransaction():
1676 if repo.currenttransaction():
1675 raise error.Abort(_('cannot delete obsmarkers in the middle '
1677 raise error.Abort(_('cannot delete obsmarkers in the middle '
1676 'of transaction.'))
1678 'of transaction.'))
1677
1679
1678 with repo.lock():
1680 with repo.lock():
1679 n = repair.deleteobsmarkers(repo.obsstore, indices)
1681 n = repair.deleteobsmarkers(repo.obsstore, indices)
1680 ui.write(_('deleted %i obsolescence markers\n') % n)
1682 ui.write(_('deleted %i obsolescence markers\n') % n)
1681
1683
1682 return
1684 return
1683
1685
1684 if precursor is not None:
1686 if precursor is not None:
1685 if opts['rev']:
1687 if opts['rev']:
1686 raise error.Abort('cannot select revision when creating marker')
1688 raise error.Abort('cannot select revision when creating marker')
1687 metadata = {}
1689 metadata = {}
1688 metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
1690 metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
1689 succs = tuple(parsenodeid(succ) for succ in successors)
1691 succs = tuple(parsenodeid(succ) for succ in successors)
1690 l = repo.lock()
1692 l = repo.lock()
1691 try:
1693 try:
1692 tr = repo.transaction('debugobsolete')
1694 tr = repo.transaction('debugobsolete')
1693 try:
1695 try:
1694 date = opts.get('date')
1696 date = opts.get('date')
1695 if date:
1697 if date:
1696 date = dateutil.parsedate(date)
1698 date = dateutil.parsedate(date)
1697 else:
1699 else:
1698 date = None
1700 date = None
1699 prec = parsenodeid(precursor)
1701 prec = parsenodeid(precursor)
1700 parents = None
1702 parents = None
1701 if opts['record_parents']:
1703 if opts['record_parents']:
1702 if prec not in repo.unfiltered():
1704 if prec not in repo.unfiltered():
1703 raise error.Abort('cannot use --record-parents on '
1705 raise error.Abort('cannot use --record-parents on '
1704 'unknown changesets')
1706 'unknown changesets')
1705 parents = repo.unfiltered()[prec].parents()
1707 parents = repo.unfiltered()[prec].parents()
1706 parents = tuple(p.node() for p in parents)
1708 parents = tuple(p.node() for p in parents)
1707 repo.obsstore.create(tr, prec, succs, opts['flags'],
1709 repo.obsstore.create(tr, prec, succs, opts['flags'],
1708 parents=parents, date=date,
1710 parents=parents, date=date,
1709 metadata=metadata, ui=ui)
1711 metadata=metadata, ui=ui)
1710 tr.close()
1712 tr.close()
1711 except ValueError as exc:
1713 except ValueError as exc:
1712 raise error.Abort(_('bad obsmarker input: %s') %
1714 raise error.Abort(_('bad obsmarker input: %s') %
1713 pycompat.bytestr(exc))
1715 pycompat.bytestr(exc))
1714 finally:
1716 finally:
1715 tr.release()
1717 tr.release()
1716 finally:
1718 finally:
1717 l.release()
1719 l.release()
1718 else:
1720 else:
1719 if opts['rev']:
1721 if opts['rev']:
1720 revs = scmutil.revrange(repo, opts['rev'])
1722 revs = scmutil.revrange(repo, opts['rev'])
1721 nodes = [repo[r].node() for r in revs]
1723 nodes = [repo[r].node() for r in revs]
1722 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1724 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1723 exclusive=opts['exclusive']))
1725 exclusive=opts['exclusive']))
1724 markers.sort(key=lambda x: x._data)
1726 markers.sort(key=lambda x: x._data)
1725 else:
1727 else:
1726 markers = obsutil.getmarkers(repo)
1728 markers = obsutil.getmarkers(repo)
1727
1729
1728 markerstoiter = markers
1730 markerstoiter = markers
1729 isrelevant = lambda m: True
1731 isrelevant = lambda m: True
1730 if opts.get('rev') and opts.get('index'):
1732 if opts.get('rev') and opts.get('index'):
1731 markerstoiter = obsutil.getmarkers(repo)
1733 markerstoiter = obsutil.getmarkers(repo)
1732 markerset = set(markers)
1734 markerset = set(markers)
1733 isrelevant = lambda m: m in markerset
1735 isrelevant = lambda m: m in markerset
1734
1736
1735 fm = ui.formatter('debugobsolete', opts)
1737 fm = ui.formatter('debugobsolete', opts)
1736 for i, m in enumerate(markerstoiter):
1738 for i, m in enumerate(markerstoiter):
1737 if not isrelevant(m):
1739 if not isrelevant(m):
1738 # marker can be irrelevant when we're iterating over a set
1740 # marker can be irrelevant when we're iterating over a set
1739 # of markers (markerstoiter) which is bigger than the set
1741 # of markers (markerstoiter) which is bigger than the set
1740 # of markers we want to display (markers)
1742 # of markers we want to display (markers)
1741 # this can happen if both --index and --rev options are
1743 # this can happen if both --index and --rev options are
1742 # provided and thus we need to iterate over all of the markers
1744 # provided and thus we need to iterate over all of the markers
1743 # to get the correct indices, but only display the ones that
1745 # to get the correct indices, but only display the ones that
1744 # are relevant to --rev value
1746 # are relevant to --rev value
1745 continue
1747 continue
1746 fm.startitem()
1748 fm.startitem()
1747 ind = i if opts.get('index') else None
1749 ind = i if opts.get('index') else None
1748 cmdutil.showmarker(fm, m, index=ind)
1750 cmdutil.showmarker(fm, m, index=ind)
1749 fm.end()
1751 fm.end()
1750
1752
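# Illustrative standalone sketch (hypothetical names, not Mercurial API) of the
# --rev/--index interaction handled above: indices are computed over the full
# marker list, while only the markers relevant to --rev are emitted.
def _relevant_with_indices(allmarkers, relevant):
    relevantset = set(relevant)
    for index, marker in enumerate(allmarkers):
        if marker in relevantset:
            yield index, marker

# Markers 'b' and 'd' keep the positions they have in the full list.
assert list(_relevant_with_indices('abcd', 'bd')) == [(1, 'b'), (3, 'd')]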
1751 @command('debugp1copies',
1753 @command('debugp1copies',
1752 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1754 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1753 _('[-r REV]'))
1755 _('[-r REV]'))
1754 def debugp1copies(ui, repo, **opts):
1756 def debugp1copies(ui, repo, **opts):
1755 """dump copy information compared to p1"""
1757 """dump copy information compared to p1"""
1756
1758
1757 opts = pycompat.byteskwargs(opts)
1759 opts = pycompat.byteskwargs(opts)
1758 ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
1760 ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
1759 for dst, src in ctx.p1copies().items():
1761 for dst, src in ctx.p1copies().items():
1760 ui.write('%s -> %s\n' % (src, dst))
1762 ui.write('%s -> %s\n' % (src, dst))
1761
1763
1762 @command('debugp2copies',
1764 @command('debugp2copies',
1763 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1765 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1764 _('[-r REV]'))
1766 _('[-r REV]'))
1765 def debugp2copies(ui, repo, **opts):
1767 def debugp2copies(ui, repo, **opts):
1766 """dump copy information compared to p2"""
1768 """dump copy information compared to p2"""
1767
1769
1768 opts = pycompat.byteskwargs(opts)
1770 opts = pycompat.byteskwargs(opts)
1769 ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
1771 ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
1770 for dst, src in ctx.p2copies().items():
1772 for dst, src in ctx.p2copies().items():
1771 ui.write('%s -> %s\n' % (src, dst))
1773 ui.write('%s -> %s\n' % (src, dst))
1772
1774
1773 @command('debugpathcomplete',
1775 @command('debugpathcomplete',
1774 [('f', 'full', None, _('complete an entire path')),
1776 [('f', 'full', None, _('complete an entire path')),
1775 ('n', 'normal', None, _('show only normal files')),
1777 ('n', 'normal', None, _('show only normal files')),
1776 ('a', 'added', None, _('show only added files')),
1778 ('a', 'added', None, _('show only added files')),
1777 ('r', 'removed', None, _('show only removed files'))],
1779 ('r', 'removed', None, _('show only removed files'))],
1778 _('FILESPEC...'))
1780 _('FILESPEC...'))
1779 def debugpathcomplete(ui, repo, *specs, **opts):
1781 def debugpathcomplete(ui, repo, *specs, **opts):
1780 '''complete part or all of a tracked path
1782 '''complete part or all of a tracked path
1781
1783
1782 This command supports shells that offer path name completion. It
1784 This command supports shells that offer path name completion. It
1783 currently completes only files already known to the dirstate.
1785 currently completes only files already known to the dirstate.
1784
1786
1785 Completion extends only to the next path segment unless
1787 Completion extends only to the next path segment unless
1786 --full is specified, in which case entire paths are used.'''
1788 --full is specified, in which case entire paths are used.'''
1787
1789
1788 def complete(path, acceptable):
1790 def complete(path, acceptable):
1789 dirstate = repo.dirstate
1791 dirstate = repo.dirstate
1790 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
1792 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
1791 rootdir = repo.root + pycompat.ossep
1793 rootdir = repo.root + pycompat.ossep
1792 if spec != repo.root and not spec.startswith(rootdir):
1794 if spec != repo.root and not spec.startswith(rootdir):
1793 return [], []
1795 return [], []
1794 if os.path.isdir(spec):
1796 if os.path.isdir(spec):
1795 spec += '/'
1797 spec += '/'
1796 spec = spec[len(rootdir):]
1798 spec = spec[len(rootdir):]
1797 fixpaths = pycompat.ossep != '/'
1799 fixpaths = pycompat.ossep != '/'
1798 if fixpaths:
1800 if fixpaths:
1799 spec = spec.replace(pycompat.ossep, '/')
1801 spec = spec.replace(pycompat.ossep, '/')
1800 speclen = len(spec)
1802 speclen = len(spec)
1801 fullpaths = opts[r'full']
1803 fullpaths = opts[r'full']
1802 files, dirs = set(), set()
1804 files, dirs = set(), set()
1803 adddir, addfile = dirs.add, files.add
1805 adddir, addfile = dirs.add, files.add
1804 for f, st in dirstate.iteritems():
1806 for f, st in dirstate.iteritems():
1805 if f.startswith(spec) and st[0] in acceptable:
1807 if f.startswith(spec) and st[0] in acceptable:
1806 if fixpaths:
1808 if fixpaths:
1807 f = f.replace('/', pycompat.ossep)
1809 f = f.replace('/', pycompat.ossep)
1808 if fullpaths:
1810 if fullpaths:
1809 addfile(f)
1811 addfile(f)
1810 continue
1812 continue
1811 s = f.find(pycompat.ossep, speclen)
1813 s = f.find(pycompat.ossep, speclen)
1812 if s >= 0:
1814 if s >= 0:
1813 adddir(f[:s])
1815 adddir(f[:s])
1814 else:
1816 else:
1815 addfile(f)
1817 addfile(f)
1816 return files, dirs
1818 return files, dirs
1817
1819
1818 acceptable = ''
1820 acceptable = ''
1819 if opts[r'normal']:
1821 if opts[r'normal']:
1820 acceptable += 'nm'
1822 acceptable += 'nm'
1821 if opts[r'added']:
1823 if opts[r'added']:
1822 acceptable += 'a'
1824 acceptable += 'a'
1823 if opts[r'removed']:
1825 if opts[r'removed']:
1824 acceptable += 'r'
1826 acceptable += 'r'
1825 cwd = repo.getcwd()
1827 cwd = repo.getcwd()
1826 if not specs:
1828 if not specs:
1827 specs = ['.']
1829 specs = ['.']
1828
1830
1829 files, dirs = set(), set()
1831 files, dirs = set(), set()
1830 for spec in specs:
1832 for spec in specs:
1831 f, d = complete(spec, acceptable or 'nmar')
1833 f, d = complete(spec, acceptable or 'nmar')
1832 files.update(f)
1834 files.update(f)
1833 dirs.update(d)
1835 dirs.update(d)
1834 files.update(dirs)
1836 files.update(dirs)
1835 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1837 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1836 ui.write('\n')
1838 ui.write('\n')
1837
1839
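# A hedged sketch of wiring the command above into shell completion from
# Python; it assumes 'hg' is on PATH and the working directory is inside a
# repository (both are assumptions for illustration, not stated by this file).
import subprocess

def complete_paths(prefix, full=False):
    cmd = ['hg', 'debugpathcomplete']
    if full:
        cmd.append('--full')
    cmd.append(prefix)
    out = subprocess.run(cmd, capture_output=True, check=True)
    # One completion candidate per line, as written by the command above.
    return out.stdout.decode().splitlines()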
1838 @command('debugpathcopies',
1840 @command('debugpathcopies',
1839 cmdutil.walkopts,
1841 cmdutil.walkopts,
1840 'hg debugpathcopies REV1 REV2 [FILE]',
1842 'hg debugpathcopies REV1 REV2 [FILE]',
1841 inferrepo=True)
1843 inferrepo=True)
1842 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
1844 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
1843 """show copies between two revisions"""
1845 """show copies between two revisions"""
1844 ctx1 = scmutil.revsingle(repo, rev1)
1846 ctx1 = scmutil.revsingle(repo, rev1)
1845 ctx2 = scmutil.revsingle(repo, rev2)
1847 ctx2 = scmutil.revsingle(repo, rev2)
1846 m = scmutil.match(ctx1, pats, opts)
1848 m = scmutil.match(ctx1, pats, opts)
1847 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
1849 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
1848 ui.write('%s -> %s\n' % (src, dst))
1850 ui.write('%s -> %s\n' % (src, dst))
1849
1851
1850 @command('debugpeer', [], _('PATH'), norepo=True)
1852 @command('debugpeer', [], _('PATH'), norepo=True)
1851 def debugpeer(ui, path):
1853 def debugpeer(ui, path):
1852 """establish a connection to a peer repository"""
1854 """establish a connection to a peer repository"""
1853 # Always enable peer request logging. Requires --debug to display
1855 # Always enable peer request logging. Requires --debug to display
1854 # though.
1856 # though.
1855 overrides = {
1857 overrides = {
1856 ('devel', 'debug.peer-request'): True,
1858 ('devel', 'debug.peer-request'): True,
1857 }
1859 }
1858
1860
1859 with ui.configoverride(overrides):
1861 with ui.configoverride(overrides):
1860 peer = hg.peer(ui, {}, path)
1862 peer = hg.peer(ui, {}, path)
1861
1863
1862 local = peer.local() is not None
1864 local = peer.local() is not None
1863 canpush = peer.canpush()
1865 canpush = peer.canpush()
1864
1866
1865 ui.write(_('url: %s\n') % peer.url())
1867 ui.write(_('url: %s\n') % peer.url())
1866 ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
1868 ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
1867 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
1869 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
1868
1870
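# Illustrative sketch of consuming debugpeer's three-line report; the labels
# ('url', 'local', 'pushable') come from the writes above, the sample URL is
# made up.
def parse_debugpeer(output):
    info = {}
    for line in output.splitlines():
        key, _, value = line.partition(': ')
        info[key] = value
    return info

assert parse_debugpeer('url: ssh://example.com/repo\nlocal: no\npushable: yes\n') == {
    'url': 'ssh://example.com/repo', 'local': 'no', 'pushable': 'yes'}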
1869 @command('debugpickmergetool',
1871 @command('debugpickmergetool',
1870 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1872 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1871 ('', 'changedelete', None, _('emulate merging change and delete')),
1873 ('', 'changedelete', None, _('emulate merging change and delete')),
1872 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1874 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1873 _('[PATTERN]...'),
1875 _('[PATTERN]...'),
1874 inferrepo=True)
1876 inferrepo=True)
1875 def debugpickmergetool(ui, repo, *pats, **opts):
1877 def debugpickmergetool(ui, repo, *pats, **opts):
1876 """examine which merge tool is chosen for specified file
1878 """examine which merge tool is chosen for specified file
1877
1879
1878 As described in :hg:`help merge-tools`, Mercurial examines
1880 As described in :hg:`help merge-tools`, Mercurial examines
1879 configurations below in this order to decide which merge tool is
1881 configurations below in this order to decide which merge tool is
1880 chosen for the specified file.
1882 chosen for the specified file.
1881
1883
1882 1. ``--tool`` option
1884 1. ``--tool`` option
1883 2. ``HGMERGE`` environment variable
1885 2. ``HGMERGE`` environment variable
1884 3. configurations in ``merge-patterns`` section
1886 3. configurations in ``merge-patterns`` section
1885 4. configuration of ``ui.merge``
1887 4. configuration of ``ui.merge``
1886 5. configurations in ``merge-tools`` section
1888 5. configurations in ``merge-tools`` section
1887 6. ``hgmerge`` tool (for historical reasons only)
1889 6. ``hgmerge`` tool (for historical reasons only)
1888 7. default tool for fallback (``:merge`` or ``:prompt``)
1890 7. default tool for fallback (``:merge`` or ``:prompt``)
1889
1891
1890 This command writes out examination result in the style below::
1892 This command writes out examination result in the style below::
1891
1893
1892 FILE = MERGETOOL
1894 FILE = MERGETOOL
1893
1895
1894 By default, all files known in the first parent context of the
1896 By default, all files known in the first parent context of the
1895 working directory are examined. Use file patterns and/or -I/-X
1897 working directory are examined. Use file patterns and/or -I/-X
1896 options to limit target files. -r/--rev is also useful to examine
1898 options to limit target files. -r/--rev is also useful to examine
1897 files in another context without actually updating to it.
1899 files in another context without actually updating to it.
1898
1900
1899 With --debug, this command shows warning messages while matching
1901 With --debug, this command shows warning messages while matching
1900 against ``merge-patterns`` and so on, too. It is recommended to
1902 against ``merge-patterns`` and so on, too. It is recommended to
1901 use this option with explicit file patterns and/or -I/-X options,
1903 use this option with explicit file patterns and/or -I/-X options,
1902 because this option increases the amount of output per file according
1904 because this option increases the amount of output per file according
1903 to configurations in hgrc.
1905 to configurations in hgrc.
1904
1906
1905 With -v/--verbose, this command shows configurations below at
1907 With -v/--verbose, this command shows configurations below at
1906 first (only if specified).
1908 first (only if specified).
1907
1909
1908 - ``--tool`` option
1910 - ``--tool`` option
1909 - ``HGMERGE`` environment variable
1911 - ``HGMERGE`` environment variable
1910 - configuration of ``ui.merge``
1912 - configuration of ``ui.merge``
1911
1913
1912 If a merge tool is chosen before matching against
1914 If a merge tool is chosen before matching against
1913 ``merge-patterns``, this command can't show any helpful
1915 ``merge-patterns``, this command can't show any helpful
1914 information, even with --debug. In such a case, the information
1916 information, even with --debug. In such a case, the information
1915 above is useful for knowing why a merge tool was chosen.
1917 above is useful for knowing why a merge tool was chosen.
1916 """
1918 """
1917 opts = pycompat.byteskwargs(opts)
1919 opts = pycompat.byteskwargs(opts)
1918 overrides = {}
1920 overrides = {}
1919 if opts['tool']:
1921 if opts['tool']:
1920 overrides[('ui', 'forcemerge')] = opts['tool']
1922 overrides[('ui', 'forcemerge')] = opts['tool']
1921 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1923 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1922
1924
1923 with ui.configoverride(overrides, 'debugmergepatterns'):
1925 with ui.configoverride(overrides, 'debugmergepatterns'):
1924 hgmerge = encoding.environ.get("HGMERGE")
1926 hgmerge = encoding.environ.get("HGMERGE")
1925 if hgmerge is not None:
1927 if hgmerge is not None:
1926 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1928 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1927 uimerge = ui.config("ui", "merge")
1929 uimerge = ui.config("ui", "merge")
1928 if uimerge:
1930 if uimerge:
1929 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1931 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1930
1932
1931 ctx = scmutil.revsingle(repo, opts.get('rev'))
1933 ctx = scmutil.revsingle(repo, opts.get('rev'))
1932 m = scmutil.match(ctx, pats, opts)
1934 m = scmutil.match(ctx, pats, opts)
1933 changedelete = opts['changedelete']
1935 changedelete = opts['changedelete']
1934 for path in ctx.walk(m):
1936 for path in ctx.walk(m):
1935 fctx = ctx[path]
1937 fctx = ctx[path]
1936 try:
1938 try:
1937 if not ui.debugflag:
1939 if not ui.debugflag:
1938 ui.pushbuffer(error=True)
1940 ui.pushbuffer(error=True)
1939 tool, toolpath = filemerge._picktool(repo, ui, path,
1941 tool, toolpath = filemerge._picktool(repo, ui, path,
1940 fctx.isbinary(),
1942 fctx.isbinary(),
1941 'l' in fctx.flags(),
1943 'l' in fctx.flags(),
1942 changedelete)
1944 changedelete)
1943 finally:
1945 finally:
1944 if not ui.debugflag:
1946 if not ui.debugflag:
1945 ui.popbuffer()
1947 ui.popbuffer()
1946 ui.write(('%s = %s\n') % (path, tool))
1948 ui.write(('%s = %s\n') % (path, tool))
1947
1949
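# Standalone sketch (hypothetical helper, not Mercurial's implementation) of
# the precedence documented above: the first configured source wins, with
# ':prompt' standing in for the final fallback.
def pick_tool(tool_opt=None, hgmerge_env=None, pattern_match=None,
              ui_merge=None, merge_tools_match=None):
    for candidate in (tool_opt, hgmerge_env, pattern_match,
                      ui_merge, merge_tools_match):
        if candidate:
            return candidate
    return ':prompt'

assert pick_tool(hgmerge_env='kdiff3', ui_merge='vimdiff') == 'kdiff3'
assert pick_tool() == ':prompt'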
1948 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1950 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1949 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1951 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1950 '''access the pushkey key/value protocol
1952 '''access the pushkey key/value protocol
1951
1953
1952 With two args, list the keys in the given namespace.
1954 With two args, list the keys in the given namespace.
1953
1955
1954 With five args, set a key to new if it currently is set to old.
1956 With five args, set a key to new if it currently is set to old.
1955 Reports success or failure.
1957 Reports success or failure.
1956 '''
1958 '''
1957
1959
1958 target = hg.peer(ui, {}, repopath)
1960 target = hg.peer(ui, {}, repopath)
1959 if keyinfo:
1961 if keyinfo:
1960 key, old, new = keyinfo
1962 key, old, new = keyinfo
1961 with target.commandexecutor() as e:
1963 with target.commandexecutor() as e:
1962 r = e.callcommand('pushkey', {
1964 r = e.callcommand('pushkey', {
1963 'namespace': namespace,
1965 'namespace': namespace,
1964 'key': key,
1966 'key': key,
1965 'old': old,
1967 'old': old,
1966 'new': new,
1968 'new': new,
1967 }).result()
1969 }).result()
1968
1970
1969 ui.status(pycompat.bytestr(r) + '\n')
1971 ui.status(pycompat.bytestr(r) + '\n')
1970 return not r
1972 return not r
1971 else:
1973 else:
1972 for k, v in sorted(target.listkeys(namespace).iteritems()):
1974 for k, v in sorted(target.listkeys(namespace).iteritems()):
1973 ui.write("%s\t%s\n" % (stringutil.escapestr(k),
1975 ui.write("%s\t%s\n" % (stringutil.escapestr(k),
1974 stringutil.escapestr(v)))
1976 stringutil.escapestr(v)))
1975
1977
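# A hedged usage sketch for the command above; the repository URL, bookmark
# name, and node placeholders are made up for illustration.
import subprocess

# Two args: list every key in the 'bookmarks' namespace of the peer.
subprocess.run(['hg', 'debugpushkey', 'https://example.com/repo', 'bookmarks'])

# Five args: move bookmark 'feature' from OLDNODE to NEWNODE; the command
# reports success or failure as described above.
subprocess.run(['hg', 'debugpushkey', 'https://example.com/repo', 'bookmarks',
                'feature', 'OLDNODE', 'NEWNODE'])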
1976 @command('debugpvec', [], _('A B'))
1978 @command('debugpvec', [], _('A B'))
1977 def debugpvec(ui, repo, a, b=None):
1979 def debugpvec(ui, repo, a, b=None):
1978 ca = scmutil.revsingle(repo, a)
1980 ca = scmutil.revsingle(repo, a)
1979 cb = scmutil.revsingle(repo, b)
1981 cb = scmutil.revsingle(repo, b)
1980 pa = pvec.ctxpvec(ca)
1982 pa = pvec.ctxpvec(ca)
1981 pb = pvec.ctxpvec(cb)
1983 pb = pvec.ctxpvec(cb)
1982 if pa == pb:
1984 if pa == pb:
1983 rel = "="
1985 rel = "="
1984 elif pa > pb:
1986 elif pa > pb:
1985 rel = ">"
1987 rel = ">"
1986 elif pa < pb:
1988 elif pa < pb:
1987 rel = "<"
1989 rel = "<"
1988 elif pa | pb:
1990 elif pa | pb:
1989 rel = "|"
1991 rel = "|"
1990 ui.write(_("a: %s\n") % pa)
1992 ui.write(_("a: %s\n") % pa)
1991 ui.write(_("b: %s\n") % pb)
1993 ui.write(_("b: %s\n") % pb)
1992 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1994 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1993 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1995 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1994 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1996 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1995 pa.distance(pb), rel))
1997 pa.distance(pb), rel))
1996
1998
1997 @command('debugrebuilddirstate|debugrebuildstate',
1999 @command('debugrebuilddirstate|debugrebuildstate',
1998 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
2000 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1999 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
2001 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
2000 'the working copy parent')),
2002 'the working copy parent')),
2001 ],
2003 ],
2002 _('[-r REV]'))
2004 _('[-r REV]'))
2003 def debugrebuilddirstate(ui, repo, rev, **opts):
2005 def debugrebuilddirstate(ui, repo, rev, **opts):
2004 """rebuild the dirstate as it would look like for the given revision
2006 """rebuild the dirstate as it would look like for the given revision
2005
2007
2006 If no revision is specified, the first parent of the working copy will be used.
2008 If no revision is specified, the first parent of the working copy will be used.
2007
2009
2008 The dirstate will be set to the files of the given revision.
2010 The dirstate will be set to the files of the given revision.
2009 The actual working directory content or existing dirstate
2011 The actual working directory content or existing dirstate
2010 information such as adds or removes is not considered.
2012 information such as adds or removes is not considered.
2011
2013
2012 ``minimal`` will only rebuild the dirstate status for files that claim to be
2014 ``minimal`` will only rebuild the dirstate status for files that claim to be
2013 tracked but are not in the parent manifest, or that exist in the parent
2015 tracked but are not in the parent manifest, or that exist in the parent
2014 manifest but are not in the dirstate. It will not change adds, removes, or
2016 manifest but are not in the dirstate. It will not change adds, removes, or
2015 modified files that are in the working copy parent.
2017 modified files that are in the working copy parent.
2016
2018
2017 One use of this command is to make the next :hg:`status` invocation
2019 One use of this command is to make the next :hg:`status` invocation
2018 check the actual file content.
2020 check the actual file content.
2019 """
2021 """
2020 ctx = scmutil.revsingle(repo, rev)
2022 ctx = scmutil.revsingle(repo, rev)
2021 with repo.wlock():
2023 with repo.wlock():
2022 dirstate = repo.dirstate
2024 dirstate = repo.dirstate
2023 changedfiles = None
2025 changedfiles = None
2024 # See command doc for what minimal does.
2026 # See command doc for what minimal does.
2025 if opts.get(r'minimal'):
2027 if opts.get(r'minimal'):
2026 manifestfiles = set(ctx.manifest().keys())
2028 manifestfiles = set(ctx.manifest().keys())
2027 dirstatefiles = set(dirstate)
2029 dirstatefiles = set(dirstate)
2028 manifestonly = manifestfiles - dirstatefiles
2030 manifestonly = manifestfiles - dirstatefiles
2029 dsonly = dirstatefiles - manifestfiles
2031 dsonly = dirstatefiles - manifestfiles
2030 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
2032 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
2031 changedfiles = manifestonly | dsnotadded
2033 changedfiles = manifestonly | dsnotadded
2032
2034
2033 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2035 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2034
2036
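# Illustrative sketch of the --minimal selection above using plain sets:
# rebuild only files tracked on one side but not the other, skipping 'added'
# entries (helper name and sample data are hypothetical).
def minimal_changedfiles(manifestfiles, dirstate_status):
    # dirstate_status maps filename -> one-letter state ('n', 'a', 'r', 'm').
    manifestfiles = set(manifestfiles)
    dirstatefiles = set(dirstate_status)
    manifestonly = manifestfiles - dirstatefiles
    dsonly = dirstatefiles - manifestfiles
    dsnotadded = {f for f in dsonly if dirstate_status[f] != 'a'}
    return manifestonly | dsnotadded

assert minimal_changedfiles(['a', 'b'],
                            {'b': 'n', 'c': 'a', 'd': 'r'}) == {'a', 'd'}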
2035 @command('debugrebuildfncache', [], '')
2037 @command('debugrebuildfncache', [], '')
2036 def debugrebuildfncache(ui, repo):
2038 def debugrebuildfncache(ui, repo):
2037 """rebuild the fncache file"""
2039 """rebuild the fncache file"""
2038 repair.rebuildfncache(ui, repo)
2040 repair.rebuildfncache(ui, repo)
2039
2041
2040 @command('debugrename',
2042 @command('debugrename',
2041 [('r', 'rev', '', _('revision to debug'), _('REV'))],
2043 [('r', 'rev', '', _('revision to debug'), _('REV'))],
2042 _('[-r REV] [FILE]...'))
2044 _('[-r REV] [FILE]...'))
2043 def debugrename(ui, repo, *pats, **opts):
2045 def debugrename(ui, repo, *pats, **opts):
2044 """dump rename information"""
2046 """dump rename information"""
2045
2047
2046 opts = pycompat.byteskwargs(opts)
2048 opts = pycompat.byteskwargs(opts)
2047 ctx = scmutil.revsingle(repo, opts.get('rev'))
2049 ctx = scmutil.revsingle(repo, opts.get('rev'))
2048 m = scmutil.match(ctx, pats, opts)
2050 m = scmutil.match(ctx, pats, opts)
2049 for abs in ctx.walk(m):
2051 for abs in ctx.walk(m):
2050 fctx = ctx[abs]
2052 fctx = ctx[abs]
2051 o = fctx.filelog().renamed(fctx.filenode())
2053 o = fctx.filelog().renamed(fctx.filenode())
2052 rel = repo.pathto(abs)
2054 rel = repo.pathto(abs)
2053 if o:
2055 if o:
2054 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2056 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2055 else:
2057 else:
2056 ui.write(_("%s not renamed\n") % rel)
2058 ui.write(_("%s not renamed\n") % rel)
2057
2059
2058 @command('debugrevlog', cmdutil.debugrevlogopts +
2060 @command('debugrevlog', cmdutil.debugrevlogopts +
2059 [('d', 'dump', False, _('dump index data'))],
2061 [('d', 'dump', False, _('dump index data'))],
2060 _('-c|-m|FILE'),
2062 _('-c|-m|FILE'),
2061 optionalrepo=True)
2063 optionalrepo=True)
2062 def debugrevlog(ui, repo, file_=None, **opts):
2064 def debugrevlog(ui, repo, file_=None, **opts):
2063 """show data and statistics about a revlog"""
2065 """show data and statistics about a revlog"""
2064 opts = pycompat.byteskwargs(opts)
2066 opts = pycompat.byteskwargs(opts)
2065 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
2067 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
2066
2068
2067 if opts.get("dump"):
2069 if opts.get("dump"):
2068 numrevs = len(r)
2070 numrevs = len(r)
2069 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
2071 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
2070 " rawsize totalsize compression heads chainlen\n"))
2072 " rawsize totalsize compression heads chainlen\n"))
2071 ts = 0
2073 ts = 0
2072 heads = set()
2074 heads = set()
2073
2075
2074 for rev in pycompat.xrange(numrevs):
2076 for rev in pycompat.xrange(numrevs):
2075 dbase = r.deltaparent(rev)
2077 dbase = r.deltaparent(rev)
2076 if dbase == -1:
2078 if dbase == -1:
2077 dbase = rev
2079 dbase = rev
2078 cbase = r.chainbase(rev)
2080 cbase = r.chainbase(rev)
2079 clen = r.chainlen(rev)
2081 clen = r.chainlen(rev)
2080 p1, p2 = r.parentrevs(rev)
2082 p1, p2 = r.parentrevs(rev)
2081 rs = r.rawsize(rev)
2083 rs = r.rawsize(rev)
2082 ts = ts + rs
2084 ts = ts + rs
2083 heads -= set(r.parentrevs(rev))
2085 heads -= set(r.parentrevs(rev))
2084 heads.add(rev)
2086 heads.add(rev)
2085 try:
2087 try:
2086 compression = ts / r.end(rev)
2088 compression = ts / r.end(rev)
2087 except ZeroDivisionError:
2089 except ZeroDivisionError:
2088 compression = 0
2090 compression = 0
2089 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2091 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2090 "%11d %5d %8d\n" %
2092 "%11d %5d %8d\n" %
2091 (rev, p1, p2, r.start(rev), r.end(rev),
2093 (rev, p1, p2, r.start(rev), r.end(rev),
2092 r.start(dbase), r.start(cbase),
2094 r.start(dbase), r.start(cbase),
2093 r.start(p1), r.start(p2),
2095 r.start(p1), r.start(p2),
2094 rs, ts, compression, len(heads), clen))
2096 rs, ts, compression, len(heads), clen))
2095 return 0
2097 return 0
2096
2098
2097 v = r.version
2099 v = r.version
2098 format = v & 0xFFFF
2100 format = v & 0xFFFF
2099 flags = []
2101 flags = []
2100 gdelta = False
2102 gdelta = False
2101 if v & revlog.FLAG_INLINE_DATA:
2103 if v & revlog.FLAG_INLINE_DATA:
2102 flags.append('inline')
2104 flags.append('inline')
2103 if v & revlog.FLAG_GENERALDELTA:
2105 if v & revlog.FLAG_GENERALDELTA:
2104 gdelta = True
2106 gdelta = True
2105 flags.append('generaldelta')
2107 flags.append('generaldelta')
2106 if not flags:
2108 if not flags:
2107 flags = ['(none)']
2109 flags = ['(none)']
2108
2110
2109 ### tracks merge vs single parent
2111 ### tracks merge vs single parent
2110 nummerges = 0
2112 nummerges = 0
2111
2113
2112 ### tracks the ways the "delta" is built
2114 ### tracks the ways the "delta" is built
2113 # nodelta
2115 # nodelta
2114 numempty = 0
2116 numempty = 0
2115 numemptytext = 0
2117 numemptytext = 0
2116 numemptydelta = 0
2118 numemptydelta = 0
2117 # full file content
2119 # full file content
2118 numfull = 0
2120 numfull = 0
2119 # intermediate snapshot against a prior snapshot
2121 # intermediate snapshot against a prior snapshot
2120 numsemi = 0
2122 numsemi = 0
2121 # snapshot count per depth
2123 # snapshot count per depth
2122 numsnapdepth = collections.defaultdict(lambda: 0)
2124 numsnapdepth = collections.defaultdict(lambda: 0)
2123 # delta against previous revision
2125 # delta against previous revision
2124 numprev = 0
2126 numprev = 0
2125 # delta against first or second parent (not prev)
2127 # delta against first or second parent (not prev)
2126 nump1 = 0
2128 nump1 = 0
2127 nump2 = 0
2129 nump2 = 0
2128 # delta against neither prev nor parents
2130 # delta against neither prev nor parents
2129 numother = 0
2131 numother = 0
2130 # delta against prev that are also first or second parent
2132 # delta against prev that are also first or second parent
2131 # (details of `numprev`)
2133 # (details of `numprev`)
2132 nump1prev = 0
2134 nump1prev = 0
2133 nump2prev = 0
2135 nump2prev = 0
2134
2136
2135 # data about delta chain of each revs
2137 # data about delta chain of each revs
2136 chainlengths = []
2138 chainlengths = []
2137 chainbases = []
2139 chainbases = []
2138 chainspans = []
2140 chainspans = []
2139
2141
2140 # data about each revision
2142 # data about each revision
2141 datasize = [None, 0, 0]
2143 datasize = [None, 0, 0]
2142 fullsize = [None, 0, 0]
2144 fullsize = [None, 0, 0]
2143 semisize = [None, 0, 0]
2145 semisize = [None, 0, 0]
2144 # snapshot count per depth
2146 # snapshot count per depth
2145 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2147 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2146 deltasize = [None, 0, 0]
2148 deltasize = [None, 0, 0]
2147 chunktypecounts = {}
2149 chunktypecounts = {}
2148 chunktypesizes = {}
2150 chunktypesizes = {}
2149
2151
2150 def addsize(size, l):
2152 def addsize(size, l):
2151 if l[0] is None or size < l[0]:
2153 if l[0] is None or size < l[0]:
2152 l[0] = size
2154 l[0] = size
2153 if size > l[1]:
2155 if size > l[1]:
2154 l[1] = size
2156 l[1] = size
2155 l[2] += size
2157 l[2] += size
2156
2158
2157 numrevs = len(r)
2159 numrevs = len(r)
2158 for rev in pycompat.xrange(numrevs):
2160 for rev in pycompat.xrange(numrevs):
2159 p1, p2 = r.parentrevs(rev)
2161 p1, p2 = r.parentrevs(rev)
2160 delta = r.deltaparent(rev)
2162 delta = r.deltaparent(rev)
2161 if format > 0:
2163 if format > 0:
2162 addsize(r.rawsize(rev), datasize)
2164 addsize(r.rawsize(rev), datasize)
2163 if p2 != nullrev:
2165 if p2 != nullrev:
2164 nummerges += 1
2166 nummerges += 1
2165 size = r.length(rev)
2167 size = r.length(rev)
2166 if delta == nullrev:
2168 if delta == nullrev:
2167 chainlengths.append(0)
2169 chainlengths.append(0)
2168 chainbases.append(r.start(rev))
2170 chainbases.append(r.start(rev))
2169 chainspans.append(size)
2171 chainspans.append(size)
2170 if size == 0:
2172 if size == 0:
2171 numempty += 1
2173 numempty += 1
2172 numemptytext += 1
2174 numemptytext += 1
2173 else:
2175 else:
2174 numfull += 1
2176 numfull += 1
2175 numsnapdepth[0] += 1
2177 numsnapdepth[0] += 1
2176 addsize(size, fullsize)
2178 addsize(size, fullsize)
2177 addsize(size, snapsizedepth[0])
2179 addsize(size, snapsizedepth[0])
2178 else:
2180 else:
2179 chainlengths.append(chainlengths[delta] + 1)
2181 chainlengths.append(chainlengths[delta] + 1)
2180 baseaddr = chainbases[delta]
2182 baseaddr = chainbases[delta]
2181 revaddr = r.start(rev)
2183 revaddr = r.start(rev)
2182 chainbases.append(baseaddr)
2184 chainbases.append(baseaddr)
2183 chainspans.append((revaddr - baseaddr) + size)
2185 chainspans.append((revaddr - baseaddr) + size)
2184 if size == 0:
2186 if size == 0:
2185 numempty += 1
2187 numempty += 1
2186 numemptydelta += 1
2188 numemptydelta += 1
2187 elif r.issnapshot(rev):
2189 elif r.issnapshot(rev):
2188 addsize(size, semisize)
2190 addsize(size, semisize)
2189 numsemi += 1
2191 numsemi += 1
2190 depth = r.snapshotdepth(rev)
2192 depth = r.snapshotdepth(rev)
2191 numsnapdepth[depth] += 1
2193 numsnapdepth[depth] += 1
2192 addsize(size, snapsizedepth[depth])
2194 addsize(size, snapsizedepth[depth])
2193 else:
2195 else:
2194 addsize(size, deltasize)
2196 addsize(size, deltasize)
2195 if delta == rev - 1:
2197 if delta == rev - 1:
2196 numprev += 1
2198 numprev += 1
2197 if delta == p1:
2199 if delta == p1:
2198 nump1prev += 1
2200 nump1prev += 1
2199 elif delta == p2:
2201 elif delta == p2:
2200 nump2prev += 1
2202 nump2prev += 1
2201 elif delta == p1:
2203 elif delta == p1:
2202 nump1 += 1
2204 nump1 += 1
2203 elif delta == p2:
2205 elif delta == p2:
2204 nump2 += 1
2206 nump2 += 1
2205 elif delta != nullrev:
2207 elif delta != nullrev:
2206 numother += 1
2208 numother += 1
2207
2209
2208 # Obtain data on the raw chunks in the revlog.
2210 # Obtain data on the raw chunks in the revlog.
2209 if util.safehasattr(r, '_getsegmentforrevs'):
2211 if util.safehasattr(r, '_getsegmentforrevs'):
2210 segment = r._getsegmentforrevs(rev, rev)[1]
2212 segment = r._getsegmentforrevs(rev, rev)[1]
2211 else:
2213 else:
2212 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2214 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2213 if segment:
2215 if segment:
2214 chunktype = bytes(segment[0:1])
2216 chunktype = bytes(segment[0:1])
2215 else:
2217 else:
2216 chunktype = 'empty'
2218 chunktype = 'empty'
2217
2219
2218 if chunktype not in chunktypecounts:
2220 if chunktype not in chunktypecounts:
2219 chunktypecounts[chunktype] = 0
2221 chunktypecounts[chunktype] = 0
2220 chunktypesizes[chunktype] = 0
2222 chunktypesizes[chunktype] = 0
2221
2223
2222 chunktypecounts[chunktype] += 1
2224 chunktypecounts[chunktype] += 1
2223 chunktypesizes[chunktype] += size
2225 chunktypesizes[chunktype] += size
2224
2226
2225 # Adjust size min value for empty cases
2227 # Adjust size min value for empty cases
2226 for size in (datasize, fullsize, semisize, deltasize):
2228 for size in (datasize, fullsize, semisize, deltasize):
2227 if size[0] is None:
2229 if size[0] is None:
2228 size[0] = 0
2230 size[0] = 0
2229
2231
2230 numdeltas = numrevs - numfull - numempty - numsemi
2232 numdeltas = numrevs - numfull - numempty - numsemi
2231 numoprev = numprev - nump1prev - nump2prev
2233 numoprev = numprev - nump1prev - nump2prev
2232 totalrawsize = datasize[2]
2234 totalrawsize = datasize[2]
2233 datasize[2] /= numrevs
2235 datasize[2] /= numrevs
2234 fulltotal = fullsize[2]
2236 fulltotal = fullsize[2]
2235 fullsize[2] /= numfull
2237 fullsize[2] /= numfull
2236 semitotal = semisize[2]
2238 semitotal = semisize[2]
2237 snaptotal = {}
2239 snaptotal = {}
2238 if numsemi > 0:
2240 if numsemi > 0:
2239 semisize[2] /= numsemi
2241 semisize[2] /= numsemi
2240 for depth in snapsizedepth:
2242 for depth in snapsizedepth:
2241 snaptotal[depth] = snapsizedepth[depth][2]
2243 snaptotal[depth] = snapsizedepth[depth][2]
2242 snapsizedepth[depth][2] /= numsnapdepth[depth]
2244 snapsizedepth[depth][2] /= numsnapdepth[depth]
2243
2245
2244 deltatotal = deltasize[2]
2246 deltatotal = deltasize[2]
2245 if numdeltas > 0:
2247 if numdeltas > 0:
2246 deltasize[2] /= numdeltas
2248 deltasize[2] /= numdeltas
2247 totalsize = fulltotal + semitotal + deltatotal
2249 totalsize = fulltotal + semitotal + deltatotal
2248 avgchainlen = sum(chainlengths) / numrevs
2250 avgchainlen = sum(chainlengths) / numrevs
2249 maxchainlen = max(chainlengths)
2251 maxchainlen = max(chainlengths)
2250 maxchainspan = max(chainspans)
2252 maxchainspan = max(chainspans)
2251 compratio = 1
2253 compratio = 1
2252 if totalsize:
2254 if totalsize:
2253 compratio = totalrawsize / totalsize
2255 compratio = totalrawsize / totalsize
2254
2256
2255 basedfmtstr = '%%%dd\n'
2257 basedfmtstr = '%%%dd\n'
2256 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2258 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2257
2259
2258 def dfmtstr(max):
2260 def dfmtstr(max):
2259 return basedfmtstr % len(str(max))
2261 return basedfmtstr % len(str(max))
2260 def pcfmtstr(max, padding=0):
2262 def pcfmtstr(max, padding=0):
2261 return basepcfmtstr % (len(str(max)), ' ' * padding)
2263 return basepcfmtstr % (len(str(max)), ' ' * padding)
2262
2264
2263 def pcfmt(value, total):
2265 def pcfmt(value, total):
2264 if total:
2266 if total:
2265 return (value, 100 * float(value) / total)
2267 return (value, 100 * float(value) / total)
2266 else:
2268 else:
2267 return value, 100.0
2269 return value, 100.0
2268
2270
2269 ui.write(('format : %d\n') % format)
2271 ui.write(('format : %d\n') % format)
2270 ui.write(('flags : %s\n') % ', '.join(flags))
2272 ui.write(('flags : %s\n') % ', '.join(flags))
2271
2273
2272 ui.write('\n')
2274 ui.write('\n')
2273 fmt = pcfmtstr(totalsize)
2275 fmt = pcfmtstr(totalsize)
2274 fmt2 = dfmtstr(totalsize)
2276 fmt2 = dfmtstr(totalsize)
2275 ui.write(('revisions : ') + fmt2 % numrevs)
2277 ui.write(('revisions : ') + fmt2 % numrevs)
2276 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2278 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2277 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2279 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2278 ui.write(('revisions : ') + fmt2 % numrevs)
2280 ui.write(('revisions : ') + fmt2 % numrevs)
2279 ui.write((' empty : ') + fmt % pcfmt(numempty, numrevs))
2281 ui.write((' empty : ') + fmt % pcfmt(numempty, numrevs))
2280 ui.write((' text : ')
2282 ui.write((' text : ')
2281 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta))
2283 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta))
2282 ui.write((' delta : ')
2284 ui.write((' delta : ')
2283 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta))
2285 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta))
2284 ui.write((' snapshot : ') + fmt % pcfmt(numfull + numsemi, numrevs))
2286 ui.write((' snapshot : ') + fmt % pcfmt(numfull + numsemi, numrevs))
2285 for depth in sorted(numsnapdepth):
2287 for depth in sorted(numsnapdepth):
2286 ui.write((' lvl-%-3d : ' % depth)
2288 ui.write((' lvl-%-3d : ' % depth)
2287 + fmt % pcfmt(numsnapdepth[depth], numrevs))
2289 + fmt % pcfmt(numsnapdepth[depth], numrevs))
2288 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2290 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2289 ui.write(('revision size : ') + fmt2 % totalsize)
2291 ui.write(('revision size : ') + fmt2 % totalsize)
2290 ui.write((' snapshot : ')
2292 ui.write((' snapshot : ')
2291 + fmt % pcfmt(fulltotal + semitotal, totalsize))
2293 + fmt % pcfmt(fulltotal + semitotal, totalsize))
2292 for depth in sorted(numsnapdepth):
2294 for depth in sorted(numsnapdepth):
2293 ui.write((' lvl-%-3d : ' % depth)
2295 ui.write((' lvl-%-3d : ' % depth)
2294 + fmt % pcfmt(snaptotal[depth], totalsize))
2296 + fmt % pcfmt(snaptotal[depth], totalsize))
2295 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2297 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2296
2298
2297 def fmtchunktype(chunktype):
2299 def fmtchunktype(chunktype):
2298 if chunktype == 'empty':
2300 if chunktype == 'empty':
2299 return ' %s : ' % chunktype
2301 return ' %s : ' % chunktype
2300 elif chunktype in pycompat.bytestr(string.ascii_letters):
2302 elif chunktype in pycompat.bytestr(string.ascii_letters):
2301 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2303 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2302 else:
2304 else:
2303 return ' 0x%s : ' % hex(chunktype)
2305 return ' 0x%s : ' % hex(chunktype)
2304
2306
2305 ui.write('\n')
2307 ui.write('\n')
2306 ui.write(('chunks : ') + fmt2 % numrevs)
2308 ui.write(('chunks : ') + fmt2 % numrevs)
2307 for chunktype in sorted(chunktypecounts):
2309 for chunktype in sorted(chunktypecounts):
2308 ui.write(fmtchunktype(chunktype))
2310 ui.write(fmtchunktype(chunktype))
2309 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2311 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2310 ui.write(('chunks size : ') + fmt2 % totalsize)
2312 ui.write(('chunks size : ') + fmt2 % totalsize)
2311 for chunktype in sorted(chunktypecounts):
2313 for chunktype in sorted(chunktypecounts):
2312 ui.write(fmtchunktype(chunktype))
2314 ui.write(fmtchunktype(chunktype))
2313 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2315 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2314
2316
2315 ui.write('\n')
2317 ui.write('\n')
2316 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2318 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2317 ui.write(('avg chain length : ') + fmt % avgchainlen)
2319 ui.write(('avg chain length : ') + fmt % avgchainlen)
2318 ui.write(('max chain length : ') + fmt % maxchainlen)
2320 ui.write(('max chain length : ') + fmt % maxchainlen)
2319 ui.write(('max chain reach : ') + fmt % maxchainspan)
2321 ui.write(('max chain reach : ') + fmt % maxchainspan)
2320 ui.write(('compression ratio : ') + fmt % compratio)
2322 ui.write(('compression ratio : ') + fmt % compratio)
2321
2323
2322 if format > 0:
2324 if format > 0:
2323 ui.write('\n')
2325 ui.write('\n')
2324 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2326 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2325 % tuple(datasize))
2327 % tuple(datasize))
2326 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2328 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2327 % tuple(fullsize))
2329 % tuple(fullsize))
2328 ui.write(('inter-snapshot size (min/max/avg) : %d / %d / %d\n')
2330 ui.write(('inter-snapshot size (min/max/avg) : %d / %d / %d\n')
2329 % tuple(semisize))
2331 % tuple(semisize))
2330 for depth in sorted(snapsizedepth):
2332 for depth in sorted(snapsizedepth):
2331 if depth == 0:
2333 if depth == 0:
2332 continue
2334 continue
2333 ui.write((' level-%-3d (min/max/avg) : %d / %d / %d\n')
2335 ui.write((' level-%-3d (min/max/avg) : %d / %d / %d\n')
2334 % ((depth,) + tuple(snapsizedepth[depth])))
2336 % ((depth,) + tuple(snapsizedepth[depth])))
2335 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2337 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2336 % tuple(deltasize))
2338 % tuple(deltasize))
2337
2339
2338 if numdeltas > 0:
2340 if numdeltas > 0:
2339 ui.write('\n')
2341 ui.write('\n')
2340 fmt = pcfmtstr(numdeltas)
2342 fmt = pcfmtstr(numdeltas)
2341 fmt2 = pcfmtstr(numdeltas, 4)
2343 fmt2 = pcfmtstr(numdeltas, 4)
2342 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2344 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2343 if numprev > 0:
2345 if numprev > 0:
2344 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2346 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2345 numprev))
2347 numprev))
2346 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2348 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2347 numprev))
2349 numprev))
2348 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2350 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2349 numprev))
2351 numprev))
2350 if gdelta:
2352 if gdelta:
2351 ui.write(('deltas against p1 : ')
2353 ui.write(('deltas against p1 : ')
2352 + fmt % pcfmt(nump1, numdeltas))
2354 + fmt % pcfmt(nump1, numdeltas))
2353 ui.write(('deltas against p2 : ')
2355 ui.write(('deltas against p2 : ')
2354 + fmt % pcfmt(nump2, numdeltas))
2356 + fmt % pcfmt(nump2, numdeltas))
2355 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2357 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2356 numdeltas))
2358 numdeltas))
2357
2359
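# Small standalone sketch (illustrative only, hypothetical names) of the
# summary figures printed above: compression ratio plus average and maximum
# chain length.
def revlog_summary(rawsizes, storedsizes, chainlengths):
    totalrawsize = sum(rawsizes)
    totalsize = sum(storedsizes)
    compratio = totalrawsize / totalsize if totalsize else 1
    return {
        'compression ratio': compratio,
        'avg chain length': sum(chainlengths) / len(chainlengths),
        'max chain length': max(chainlengths),
    }

# 300 bytes of raw text stored in 100 bytes -> ratio 3.0, chains 0/1/2.
print(revlog_summary([100, 100, 100], [60, 20, 20], [0, 1, 2]))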
2358 @command('debugrevlogindex', cmdutil.debugrevlogopts +
2360 @command('debugrevlogindex', cmdutil.debugrevlogopts +
2359 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
2361 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
2360 _('[-f FORMAT] -c|-m|FILE'),
2362 _('[-f FORMAT] -c|-m|FILE'),
2361 optionalrepo=True)
2363 optionalrepo=True)
2362 def debugrevlogindex(ui, repo, file_=None, **opts):
2364 def debugrevlogindex(ui, repo, file_=None, **opts):
2363 """dump the contents of a revlog index"""
2365 """dump the contents of a revlog index"""
2364 opts = pycompat.byteskwargs(opts)
2366 opts = pycompat.byteskwargs(opts)
2365 r = cmdutil.openrevlog(repo, 'debugrevlogindex', file_, opts)
2367 r = cmdutil.openrevlog(repo, 'debugrevlogindex', file_, opts)
2366 format = opts.get('format', 0)
2368 format = opts.get('format', 0)
2367 if format not in (0, 1):
2369 if format not in (0, 1):
2368 raise error.Abort(_("unknown format %d") % format)
2370 raise error.Abort(_("unknown format %d") % format)
2369
2371
2370 if ui.debugflag:
2372 if ui.debugflag:
2371 shortfn = hex
2373 shortfn = hex
2372 else:
2374 else:
2373 shortfn = short
2375 shortfn = short
2374
2376
2375 # There might not be anything in r, so have a sane default
2377 # There might not be anything in r, so have a sane default
2376 idlen = 12
2378 idlen = 12
2377 for i in r:
2379 for i in r:
2378 idlen = len(shortfn(r.node(i)))
2380 idlen = len(shortfn(r.node(i)))
2379 break
2381 break
2380
2382
2381 if format == 0:
2383 if format == 0:
2382 if ui.verbose:
2384 if ui.verbose:
2383 ui.write((" rev offset length linkrev"
2385 ui.write((" rev offset length linkrev"
2384 " %s %s p2\n") % ("nodeid".ljust(idlen),
2386 " %s %s p2\n") % ("nodeid".ljust(idlen),
2385 "p1".ljust(idlen)))
2387 "p1".ljust(idlen)))
2386 else:
2388 else:
2387 ui.write((" rev linkrev %s %s p2\n") % (
2389 ui.write((" rev linkrev %s %s p2\n") % (
2388 "nodeid".ljust(idlen), "p1".ljust(idlen)))
2390 "nodeid".ljust(idlen), "p1".ljust(idlen)))
2389 elif format == 1:
2391 elif format == 1:
2390 if ui.verbose:
2392 if ui.verbose:
2391 ui.write((" rev flag offset length size link p1"
2393 ui.write((" rev flag offset length size link p1"
2392 " p2 %s\n") % "nodeid".rjust(idlen))
2394 " p2 %s\n") % "nodeid".rjust(idlen))
2393 else:
2395 else:
2394 ui.write((" rev flag size link p1 p2 %s\n") %
2396 ui.write((" rev flag size link p1 p2 %s\n") %
2395 "nodeid".rjust(idlen))
2397 "nodeid".rjust(idlen))
2396
2398
2397 for i in r:
2399 for i in r:
2398 node = r.node(i)
2400 node = r.node(i)
2399 if format == 0:
2401 if format == 0:
2400 try:
2402 try:
2401 pp = r.parents(node)
2403 pp = r.parents(node)
2402 except Exception:
2404 except Exception:
2403 pp = [nullid, nullid]
2405 pp = [nullid, nullid]
2404 if ui.verbose:
2406 if ui.verbose:
2405 ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
2407 ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
2406 i, r.start(i), r.length(i), r.linkrev(i),
2408 i, r.start(i), r.length(i), r.linkrev(i),
2407 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
2409 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
2408 else:
2410 else:
2409 ui.write("% 6d % 7d %s %s %s\n" % (
2411 ui.write("% 6d % 7d %s %s %s\n" % (
2410 i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
2412 i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
2411 shortfn(pp[1])))
2413 shortfn(pp[1])))
2412 elif format == 1:
2414 elif format == 1:
2413 pr = r.parentrevs(i)
2415 pr = r.parentrevs(i)
2414 if ui.verbose:
2416 if ui.verbose:
2415 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
2417 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
2416 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
2418 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
2417 r.linkrev(i), pr[0], pr[1], shortfn(node)))
2419 r.linkrev(i), pr[0], pr[1], shortfn(node)))
2418 else:
2420 else:
2419 ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
2421 ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
2420 i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
2422 i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
2421 shortfn(node)))
2423 shortfn(node)))
2422
2424
2423 @command('debugrevspec',
2425 @command('debugrevspec',
2424 [('', 'optimize', None,
2426 [('', 'optimize', None,
2425 _('print parsed tree after optimizing (DEPRECATED)')),
2427 _('print parsed tree after optimizing (DEPRECATED)')),
2426 ('', 'show-revs', True, _('print list of result revisions (default)')),
2428 ('', 'show-revs', True, _('print list of result revisions (default)')),
2427 ('s', 'show-set', None, _('print internal representation of result set')),
2429 ('s', 'show-set', None, _('print internal representation of result set')),
2428 ('p', 'show-stage', [],
2430 ('p', 'show-stage', [],
2429 _('print parsed tree at the given stage'), _('NAME')),
2431 _('print parsed tree at the given stage'), _('NAME')),
2430 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2432 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2431 ('', 'verify-optimized', False, _('verify optimized result')),
2433 ('', 'verify-optimized', False, _('verify optimized result')),
2432 ],
2434 ],
2433 ('REVSPEC'))
2435 ('REVSPEC'))
2434 def debugrevspec(ui, repo, expr, **opts):
2436 def debugrevspec(ui, repo, expr, **opts):
2435 """parse and apply a revision specification
2437 """parse and apply a revision specification
2436
2438
2437 Use -p/--show-stage option to print the parsed tree at the given stages.
2439 Use -p/--show-stage option to print the parsed tree at the given stages.
2438 Use -p all to print the tree at every stage.
2440 Use -p all to print the tree at every stage.
2439
2441
2440 Use --no-show-revs option with -s or -p to print only the set
2442 Use --no-show-revs option with -s or -p to print only the set
2441 representation or the parsed tree respectively.
2443 representation or the parsed tree respectively.
2442
2444
2443 Use --verify-optimized to compare the optimized result with the unoptimized
2445 Use --verify-optimized to compare the optimized result with the unoptimized
2444 one. Returns 1 if the optimized result differs.
2446 one. Returns 1 if the optimized result differs.
2445 """
2447 """
2446 opts = pycompat.byteskwargs(opts)
2448 opts = pycompat.byteskwargs(opts)
2447 aliases = ui.configitems('revsetalias')
2449 aliases = ui.configitems('revsetalias')
2448 stages = [
2450 stages = [
2449 ('parsed', lambda tree: tree),
2451 ('parsed', lambda tree: tree),
2450 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2452 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2451 ui.warn)),
2453 ui.warn)),
2452 ('concatenated', revsetlang.foldconcat),
2454 ('concatenated', revsetlang.foldconcat),
2453 ('analyzed', revsetlang.analyze),
2455 ('analyzed', revsetlang.analyze),
2454 ('optimized', revsetlang.optimize),
2456 ('optimized', revsetlang.optimize),
2455 ]
2457 ]
2456 if opts['no_optimized']:
2458 if opts['no_optimized']:
2457 stages = stages[:-1]
2459 stages = stages[:-1]
2458 if opts['verify_optimized'] and opts['no_optimized']:
2460 if opts['verify_optimized'] and opts['no_optimized']:
2459 raise error.Abort(_('cannot use --verify-optimized with '
2461 raise error.Abort(_('cannot use --verify-optimized with '
2460 '--no-optimized'))
2462 '--no-optimized'))
2461 stagenames = set(n for n, f in stages)
2463 stagenames = set(n for n, f in stages)
2462
2464
2463 showalways = set()
2465 showalways = set()
2464 showchanged = set()
2466 showchanged = set()
2465 if ui.verbose and not opts['show_stage']:
2467 if ui.verbose and not opts['show_stage']:
2466 # show parsed tree by --verbose (deprecated)
2468 # show parsed tree by --verbose (deprecated)
2467 showalways.add('parsed')
2469 showalways.add('parsed')
2468 showchanged.update(['expanded', 'concatenated'])
2470 showchanged.update(['expanded', 'concatenated'])
2469 if opts['optimize']:
2471 if opts['optimize']:
2470 showalways.add('optimized')
2472 showalways.add('optimized')
2471 if opts['show_stage'] and opts['optimize']:
2473 if opts['show_stage'] and opts['optimize']:
2472 raise error.Abort(_('cannot use --optimize with --show-stage'))
2474 raise error.Abort(_('cannot use --optimize with --show-stage'))
2473 if opts['show_stage'] == ['all']:
2475 if opts['show_stage'] == ['all']:
2474 showalways.update(stagenames)
2476 showalways.update(stagenames)
2475 else:
2477 else:
2476 for n in opts['show_stage']:
2478 for n in opts['show_stage']:
2477 if n not in stagenames:
2479 if n not in stagenames:
2478 raise error.Abort(_('invalid stage name: %s') % n)
2480 raise error.Abort(_('invalid stage name: %s') % n)
2479 showalways.update(opts['show_stage'])
2481 showalways.update(opts['show_stage'])
2480
2482
2481 treebystage = {}
2483 treebystage = {}
2482 printedtree = None
2484 printedtree = None
2483 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
2485 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
2484 for n, f in stages:
2486 for n, f in stages:
2485 treebystage[n] = tree = f(tree)
2487 treebystage[n] = tree = f(tree)
2486 if n in showalways or (n in showchanged and tree != printedtree):
2488 if n in showalways or (n in showchanged and tree != printedtree):
2487 if opts['show_stage'] or n != 'parsed':
2489 if opts['show_stage'] or n != 'parsed':
2488 ui.write(("* %s:\n") % n)
2490 ui.write(("* %s:\n") % n)
2489 ui.write(revsetlang.prettyformat(tree), "\n")
2491 ui.write(revsetlang.prettyformat(tree), "\n")
2490 printedtree = tree
2492 printedtree = tree
2491
2493
2492 if opts['verify_optimized']:
2494 if opts['verify_optimized']:
2493 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2495 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2494 brevs = revset.makematcher(treebystage['optimized'])(repo)
2496 brevs = revset.makematcher(treebystage['optimized'])(repo)
2495 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2497 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2496 ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
2498 ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
2497 ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
2499 ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
2498 arevs = list(arevs)
2500 arevs = list(arevs)
2499 brevs = list(brevs)
2501 brevs = list(brevs)
2500 if arevs == brevs:
2502 if arevs == brevs:
2501 return 0
2503 return 0
2502 ui.write(('--- analyzed\n'), label='diff.file_a')
2504 ui.write(('--- analyzed\n'), label='diff.file_a')
2503 ui.write(('+++ optimized\n'), label='diff.file_b')
2505 ui.write(('+++ optimized\n'), label='diff.file_b')
2504 sm = difflib.SequenceMatcher(None, arevs, brevs)
2506 sm = difflib.SequenceMatcher(None, arevs, brevs)
2505 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2507 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2506 if tag in (r'delete', r'replace'):
2508 if tag in (r'delete', r'replace'):
2507 for c in arevs[alo:ahi]:
2509 for c in arevs[alo:ahi]:
2508 ui.write('-%d\n' % c, label='diff.deleted')
2510 ui.write('-%d\n' % c, label='diff.deleted')
2509 if tag in (r'insert', r'replace'):
2511 if tag in (r'insert', r'replace'):
2510 for c in brevs[blo:bhi]:
2512 for c in brevs[blo:bhi]:
2511 ui.write('+%d\n' % c, label='diff.inserted')
2513 ui.write('+%d\n' % c, label='diff.inserted')
2512 if tag == r'equal':
2514 if tag == r'equal':
2513 for c in arevs[alo:ahi]:
2515 for c in arevs[alo:ahi]:
2514 ui.write(' %d\n' % c)
2516 ui.write(' %d\n' % c)
2515 return 1
2517 return 1
2516
2518
2517 func = revset.makematcher(tree)
2519 func = revset.makematcher(tree)
2518 revs = func(repo)
2520 revs = func(repo)
2519 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2521 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2520 ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
2522 ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
2521 if not opts['show_revs']:
2523 if not opts['show_revs']:
2522 return
2524 return
2523 for c in revs:
2525 for c in revs:
2524 ui.write("%d\n" % c)
2526 ui.write("%d\n" % c)
2525
2527
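# Minimal sketch (the stage functions here are hypothetical) of the staged
# pipeline above: each stage rewrites the tree and selected stages are printed.
def run_stages(tree, stages, show=()):
    treebystage = {}
    for name, fn in stages:
        tree = fn(tree)
        treebystage[name] = tree
        if name in show:
            print('* %s:' % name, tree)
    return treebystage, tree

stages = [('parsed', lambda t: t), ('optimized', lambda t: sorted(t))]
print(run_stages([3, 1, 2], stages, show={'optimized'}))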
2526 @command('debugserve', [
2528 @command('debugserve', [
2527 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2529 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2528 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2530 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2529 ('', 'logiofile', '', _('file to log server I/O to')),
2531 ('', 'logiofile', '', _('file to log server I/O to')),
2530 ], '')
2532 ], '')
2531 def debugserve(ui, repo, **opts):
2533 def debugserve(ui, repo, **opts):
2532 """run a server with advanced settings
2534 """run a server with advanced settings
2533
2535
2534 This command is similar to :hg:`serve`. It exists partially as a
2536 This command is similar to :hg:`serve`. It exists partially as a
2535 workaround for the fact that ``hg serve --stdio`` must have specific
2537 workaround for the fact that ``hg serve --stdio`` must have specific
2536 arguments for security reasons.
2538 arguments for security reasons.
2537 """
2539 """
2538 opts = pycompat.byteskwargs(opts)
2540 opts = pycompat.byteskwargs(opts)
2539
2541
2540 if not opts['sshstdio']:
2542 if not opts['sshstdio']:
2541 raise error.Abort(_('only --sshstdio is currently supported'))
2543 raise error.Abort(_('only --sshstdio is currently supported'))
2542
2544
2543 logfh = None
2545 logfh = None
2544
2546
2545 if opts['logiofd'] and opts['logiofile']:
2547 if opts['logiofd'] and opts['logiofile']:
2546 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2548 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2547
2549
2548 if opts['logiofd']:
2550 if opts['logiofd']:
2549 # Line buffered because output is line based.
2551 # Line buffered because output is line based.
2550 try:
2552 try:
2551 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2553 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2552 except OSError as e:
2554 except OSError as e:
2553 if e.errno != errno.ESPIPE:
2555 if e.errno != errno.ESPIPE:
2554 raise
2556 raise
2555 # can't seek a pipe, so `ab` mode fails on py3
2557 # can't seek a pipe, so `ab` mode fails on py3
2556 logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
2558 logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
2557 elif opts['logiofile']:
2559 elif opts['logiofile']:
2558 logfh = open(opts['logiofile'], 'ab', 1)
2560 logfh = open(opts['logiofile'], 'ab', 1)
2559
2561
2560 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2562 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2561 s.serve_forever()
2563 s.serve_forever()
2562
2564
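# Illustrative sketch of the --logiofd fallback above: append mode needs a
# seekable descriptor, so for pipes the open is retried in plain 'wb' mode
# (mirroring the comment in the code; the helper name is hypothetical).
import errno
import os

def open_log_fd(fd):
    try:
        return os.fdopen(fd, 'ab', 1)
    except OSError as e:
        if e.errno != errno.ESPIPE:
            raise
        # can't seek a pipe, so 'ab' fails; fall back to 'wb'
        return os.fdopen(fd, 'wb', 1)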
2563 @command('debugsetparents', [], _('REV1 [REV2]'))
2565 @command('debugsetparents', [], _('REV1 [REV2]'))
2564 def debugsetparents(ui, repo, rev1, rev2=None):
2566 def debugsetparents(ui, repo, rev1, rev2=None):
2565 """manually set the parents of the current working directory
2567 """manually set the parents of the current working directory
2566
2568
2567 This is useful for writing repository conversion tools, but should
2569 This is useful for writing repository conversion tools, but should
2568 be used with care. For example, neither the working directory nor the
2570 be used with care. For example, neither the working directory nor the
2569 dirstate is updated, so file status may be incorrect after running this
2571 dirstate is updated, so file status may be incorrect after running this
2570 command.
2572 command.
2571
2573
2572 Returns 0 on success.
2574 Returns 0 on success.
2573 """
2575 """
2574
2576
2575 node1 = scmutil.revsingle(repo, rev1).node()
2577 node1 = scmutil.revsingle(repo, rev1).node()
2576 node2 = scmutil.revsingle(repo, rev2, 'null').node()
2578 node2 = scmutil.revsingle(repo, rev2, 'null').node()
2577
2579
2578 with repo.wlock():
2580 with repo.wlock():
2579 repo.setparents(node1, node2)
2581 repo.setparents(node1, node2)
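# Hedged examples of the above: ``hg debugsetparents REV`` records REV as
# the sole parent (the second parent defaults to null), while
# ``hg debugsetparents REV1 REV2`` records two parents as if a merge were
# in progress. As the docstring warns, file status is not refreshed.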
2580
2582
2581 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2583 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2582 def debugssl(ui, repo, source=None, **opts):
2584 def debugssl(ui, repo, source=None, **opts):
2583 '''test a secure connection to a server
2585 '''test a secure connection to a server
2584
2586
2585 This builds the certificate chain for the server on Windows, installing the
2587 This builds the certificate chain for the server on Windows, installing the
2586 missing intermediates and trusted root via Windows Update if necessary. It
2588 missing intermediates and trusted root via Windows Update if necessary. It
2587 does nothing on other platforms.
2589 does nothing on other platforms.
2588
2590
2589 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2591 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2590 that server is used. See :hg:`help urls` for more information.
2592 that server is used. See :hg:`help urls` for more information.
2591
2593
2592 If the update succeeds, retry the original operation. Otherwise, the cause
2594 If the update succeeds, retry the original operation. Otherwise, the cause
2593 of the SSL error is likely another issue.
2595 of the SSL error is likely another issue.
2594 '''
2596 '''
2595 if not pycompat.iswindows:
2597 if not pycompat.iswindows:
2596 raise error.Abort(_('certificate chain building is only possible on '
2598 raise error.Abort(_('certificate chain building is only possible on '
2597 'Windows'))
2599 'Windows'))
2598
2600
2599 if not source:
2601 if not source:
2600 if not repo:
2602 if not repo:
2601 raise error.Abort(_("there is no Mercurial repository here, and no "
2603 raise error.Abort(_("there is no Mercurial repository here, and no "
2602 "server specified"))
2604 "server specified"))
2603 source = "default"
2605 source = "default"
2604
2606
2605 source, branches = hg.parseurl(ui.expandpath(source))
2607 source, branches = hg.parseurl(ui.expandpath(source))
2606 url = util.url(source)
2608 url = util.url(source)
2607
2609
2608 defaultport = {'https': 443, 'ssh': 22}
2610 defaultport = {'https': 443, 'ssh': 22}
2609 if url.scheme in defaultport:
2611 if url.scheme in defaultport:
2610 try:
2612 try:
2611 addr = (url.host, int(url.port or defaultport[url.scheme]))
2613 addr = (url.host, int(url.port or defaultport[url.scheme]))
2612 except ValueError:
2614 except ValueError:
2613 raise error.Abort(_("malformed port number in URL"))
2615 raise error.Abort(_("malformed port number in URL"))
2614 else:
2616 else:
2615 raise error.Abort(_("only https and ssh connections are supported"))
2617 raise error.Abort(_("only https and ssh connections are supported"))
2616
2618
2617 from . import win32
2619 from . import win32
2618
2620
2619 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2621 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2620 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2622 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2621
2623
2622 try:
2624 try:
2623 s.connect(addr)
2625 s.connect(addr)
2624 cert = s.getpeercert(True)
2626 cert = s.getpeercert(True)
2625
2627
2626 ui.status(_('checking the certificate chain for %s\n') % url.host)
2628 ui.status(_('checking the certificate chain for %s\n') % url.host)
2627
2629
2628 complete = win32.checkcertificatechain(cert, build=False)
2630 complete = win32.checkcertificatechain(cert, build=False)
2629
2631
2630 if not complete:
2632 if not complete:
2631 ui.status(_('certificate chain is incomplete, updating... '))
2633 ui.status(_('certificate chain is incomplete, updating... '))
2632
2634
2633 if not win32.checkcertificatechain(cert):
2635 if not win32.checkcertificatechain(cert):
2634 ui.status(_('failed.\n'))
2636 ui.status(_('failed.\n'))
2635 else:
2637 else:
2636 ui.status(_('done.\n'))
2638 ui.status(_('done.\n'))
2637 else:
2639 else:
2638 ui.status(_('full certificate chain is available\n'))
2640 ui.status(_('full certificate chain is available\n'))
2639 finally:
2641 finally:
2640 s.close()
2642 s.close()
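# A hedged example (the URL is illustrative): on Windows,
#
#   hg debugssl https://hg.example.com/repo
#
# probes that server's certificate chain; run with no argument inside a
# repository, the 'default' path is checked instead.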
2641
2643
2642 @command('debugsub',
2644 @command('debugsub',
2643 [('r', 'rev', '',
2645 [('r', 'rev', '',
2644 _('revision to check'), _('REV'))],
2646 _('revision to check'), _('REV'))],
2645 _('[-r REV] [REV]'))
2647 _('[-r REV] [REV]'))
2646 def debugsub(ui, repo, rev=None):
2648 def debugsub(ui, repo, rev=None):
2647 ctx = scmutil.revsingle(repo, rev, None)
2649 ctx = scmutil.revsingle(repo, rev, None)
2648 for k, v in sorted(ctx.substate.items()):
2650 for k, v in sorted(ctx.substate.items()):
2649 ui.write(('path %s\n') % k)
2651 ui.write(('path %s\n') % k)
2650 ui.write((' source %s\n') % v[0])
2652 ui.write((' source %s\n') % v[0])
2651 ui.write((' revision %s\n') % v[1])
2653 ui.write((' revision %s\n') % v[1])
2652
2654
2653 @command('debugsuccessorssets',
2655 @command('debugsuccessorssets',
2654 [('', 'closest', False, _('return closest successors sets only'))],
2656 [('', 'closest', False, _('return closest successors sets only'))],
2655 _('[REV]'))
2657 _('[REV]'))
2656 def debugsuccessorssets(ui, repo, *revs, **opts):
2658 def debugsuccessorssets(ui, repo, *revs, **opts):
2657 """show set of successors for revision
2659 """show set of successors for revision
2658
2660
2659 A successors set of changeset A is a consistent group of revisions that
2661 A successors set of changeset A is a consistent group of revisions that
2660 succeed A. It contains non-obsolete changesets only unless the closest
2662 succeed A. It contains non-obsolete changesets only unless the closest
2661 successors sets are requested (``--closest``).
2663 successors sets are requested (``--closest``).
2662
2664
2663 In most cases a changeset A has a single successors set containing a single
2665 In most cases a changeset A has a single successors set containing a single
2664 successor (changeset A replaced by A').
2666 successor (changeset A replaced by A').
2665
2667
2666 A changeset that is made obsolete with no successors is called "pruned".
2668 A changeset that is made obsolete with no successors is called "pruned".
2667 Such changesets have no successors sets at all.
2669 Such changesets have no successors sets at all.
2668
2670
2669 A changeset that has been "split" will have a successors set containing
2671 A changeset that has been "split" will have a successors set containing
2670 more than one successor.
2672 more than one successor.
2671
2673
2672 A changeset that has been rewritten in multiple different ways is called
2674 A changeset that has been rewritten in multiple different ways is called
2673 "divergent". Such changesets have multiple successor sets (each of which
2675 "divergent". Such changesets have multiple successor sets (each of which
2674 may also be split, i.e. have multiple successors).
2676 may also be split, i.e. have multiple successors).
2675
2677
2676 Results are displayed as follows::
2678 Results are displayed as follows::
2677
2679
2678 <rev1>
2680 <rev1>
2679 <successors-1A>
2681 <successors-1A>
2680 <rev2>
2682 <rev2>
2681 <successors-2A>
2683 <successors-2A>
2682 <successors-2B1> <successors-2B2> <successors-2B3>
2684 <successors-2B1> <successors-2B2> <successors-2B3>
2683
2685
2684 Here rev2 has two possible (i.e. divergent) successors sets. The first
2686 Here rev2 has two possible (i.e. divergent) successors sets. The first
2685 holds one element, whereas the second holds three (i.e. the changeset has
2687 holds one element, whereas the second holds three (i.e. the changeset has
2686 been split).
2688 been split).
2687 """
2689 """
2688 # passed to successorssets caching computation from one call to another
2690 # passed to successorssets caching computation from one call to another
2689 cache = {}
2691 cache = {}
2690 ctx2str = bytes
2692 ctx2str = bytes
2691 node2str = short
2693 node2str = short
2692 for rev in scmutil.revrange(repo, revs):
2694 for rev in scmutil.revrange(repo, revs):
2693 ctx = repo[rev]
2695 ctx = repo[rev]
2694 ui.write('%s\n'% ctx2str(ctx))
2696 ui.write('%s\n'% ctx2str(ctx))
2695 for succsset in obsutil.successorssets(repo, ctx.node(),
2697 for succsset in obsutil.successorssets(repo, ctx.node(),
2696 closest=opts[r'closest'],
2698 closest=opts[r'closest'],
2697 cache=cache):
2699 cache=cache):
2698 if succsset:
2700 if succsset:
2699 ui.write(' ')
2701 ui.write(' ')
2700 ui.write(node2str(succsset[0]))
2702 ui.write(node2str(succsset[0]))
2701 for node in succsset[1:]:
2703 for node in succsset[1:]:
2702 ui.write(' ')
2704 ui.write(' ')
2703 ui.write(node2str(node))
2705 ui.write(node2str(node))
2704 ui.write('\n')
2706 ui.write('\n')
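# A hedged example ('obsolete()' is just one possible revset): with
# obsolescence markers present,
#
#   hg debugsuccessorssets --closest 'obsolete()'
#
# prints each obsolete changeset followed by its closest successors sets
# in the layout described in the docstring above.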
2705
2707
2706 @command('debugtemplate',
2708 @command('debugtemplate',
2707 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2709 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2708 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2710 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2709 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2711 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2710 optionalrepo=True)
2712 optionalrepo=True)
2711 def debugtemplate(ui, repo, tmpl, **opts):
2713 def debugtemplate(ui, repo, tmpl, **opts):
2712 """parse and apply a template
2714 """parse and apply a template
2713
2715
2714 If -r/--rev is given, the template is processed as a log template and
2716 If -r/--rev is given, the template is processed as a log template and
2715 applied to the given changesets. Otherwise, it is processed as a generic
2717 applied to the given changesets. Otherwise, it is processed as a generic
2716 template.
2718 template.
2717
2719
2718 Use --verbose to print the parsed tree.
2720 Use --verbose to print the parsed tree.
2719 """
2721 """
2720 revs = None
2722 revs = None
2721 if opts[r'rev']:
2723 if opts[r'rev']:
2722 if repo is None:
2724 if repo is None:
2723 raise error.RepoError(_('there is no Mercurial repository here '
2725 raise error.RepoError(_('there is no Mercurial repository here '
2724 '(.hg not found)'))
2726 '(.hg not found)'))
2725 revs = scmutil.revrange(repo, opts[r'rev'])
2727 revs = scmutil.revrange(repo, opts[r'rev'])
2726
2728
2727 props = {}
2729 props = {}
2728 for d in opts[r'define']:
2730 for d in opts[r'define']:
2729 try:
2731 try:
2730 k, v = (e.strip() for e in d.split('=', 1))
2732 k, v = (e.strip() for e in d.split('=', 1))
2731 if not k or k == 'ui':
2733 if not k or k == 'ui':
2732 raise ValueError
2734 raise ValueError
2733 props[k] = v
2735 props[k] = v
2734 except ValueError:
2736 except ValueError:
2735 raise error.Abort(_('malformed keyword definition: %s') % d)
2737 raise error.Abort(_('malformed keyword definition: %s') % d)
2736
2738
2737 if ui.verbose:
2739 if ui.verbose:
2738 aliases = ui.configitems('templatealias')
2740 aliases = ui.configitems('templatealias')
2739 tree = templater.parse(tmpl)
2741 tree = templater.parse(tmpl)
2740 ui.note(templater.prettyformat(tree), '\n')
2742 ui.note(templater.prettyformat(tree), '\n')
2741 newtree = templater.expandaliases(tree, aliases)
2743 newtree = templater.expandaliases(tree, aliases)
2742 if newtree != tree:
2744 if newtree != tree:
2743 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2745 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2744
2746
2745 if revs is None:
2747 if revs is None:
2746 tres = formatter.templateresources(ui, repo)
2748 tres = formatter.templateresources(ui, repo)
2747 t = formatter.maketemplater(ui, tmpl, resources=tres)
2749 t = formatter.maketemplater(ui, tmpl, resources=tres)
2748 if ui.verbose:
2750 if ui.verbose:
2749 kwds, funcs = t.symbolsuseddefault()
2751 kwds, funcs = t.symbolsuseddefault()
2750 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2752 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2751 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2753 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2752 ui.write(t.renderdefault(props))
2754 ui.write(t.renderdefault(props))
2753 else:
2755 else:
2754 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2756 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2755 if ui.verbose:
2757 if ui.verbose:
2756 kwds, funcs = displayer.t.symbolsuseddefault()
2758 kwds, funcs = displayer.t.symbolsuseddefault()
2757 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2759 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2758 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2760 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2759 for r in revs:
2761 for r in revs:
2760 displayer.show(repo[r], **pycompat.strkwargs(props))
2762 displayer.show(repo[r], **pycompat.strkwargs(props))
2761 displayer.close()
2763 displayer.close()
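# A hedged example ('greeting' is an arbitrary keyword invented for
# illustration):
#
#   hg debugtemplate -r . -D greeting=hello '{greeting} {rev}:{node|short}\n'
#
# renders the template against the working-directory parent; adding
# --verbose also prints the parsed template tree.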
2762
2764
2763 @command('debuguigetpass', [
2765 @command('debuguigetpass', [
2764 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2766 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2765 ], _('[-p TEXT]'), norepo=True)
2767 ], _('[-p TEXT]'), norepo=True)
2766 def debuguigetpass(ui, prompt=''):
2768 def debuguigetpass(ui, prompt=''):
2767 """show prompt to type password"""
2769 """show prompt to type password"""
2768 r = ui.getpass(prompt)
2770 r = ui.getpass(prompt)
2769 ui.write(('response: %s\n') % r)
2771 ui.write(('response: %s\n') % r)
2770
2772
2771 @command('debuguiprompt', [
2773 @command('debuguiprompt', [
2772 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2774 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2773 ], _('[-p TEXT]'), norepo=True)
2775 ], _('[-p TEXT]'), norepo=True)
2774 def debuguiprompt(ui, prompt=''):
2776 def debuguiprompt(ui, prompt=''):
2775 """show plain prompt"""
2777 """show plain prompt"""
2776 r = ui.prompt(prompt)
2778 r = ui.prompt(prompt)
2777 ui.write(('response: %s\n') % r)
2779 ui.write(('response: %s\n') % r)
2778
2780
2779 @command('debugupdatecaches', [])
2781 @command('debugupdatecaches', [])
2780 def debugupdatecaches(ui, repo, *pats, **opts):
2782 def debugupdatecaches(ui, repo, *pats, **opts):
2781 """warm all known caches in the repository"""
2783 """warm all known caches in the repository"""
2782 with repo.wlock(), repo.lock():
2784 with repo.wlock(), repo.lock():
2783 repo.updatecaches(full=True)
2785 repo.updatecaches(full=True)
2784
2786
2785 @command('debugupgraderepo', [
2787 @command('debugupgraderepo', [
2786 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2788 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2787 ('', 'run', False, _('performs an upgrade')),
2789 ('', 'run', False, _('performs an upgrade')),
2788 ('', 'backup', True, _('keep the old repository content around')),
2790 ('', 'backup', True, _('keep the old repository content around')),
2789 ])
2791 ])
2790 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True):
2792 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True):
2791 """upgrade a repository to use different features
2793 """upgrade a repository to use different features
2792
2794
2793 If no arguments are specified, the repository is evaluated for upgrade
2795 If no arguments are specified, the repository is evaluated for upgrade
2794 and a list of problems and potential optimizations is printed.
2796 and a list of problems and potential optimizations is printed.
2795
2797
2796 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2798 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2797 can be influenced via additional arguments. More details will be provided
2799 can be influenced via additional arguments. More details will be provided
2798 by the command output when run without ``--run``.
2800 by the command output when run without ``--run``.
2799
2801
2800 During the upgrade, the repository will be locked and no writes will be
2802 During the upgrade, the repository will be locked and no writes will be
2801 allowed.
2803 allowed.
2802
2804
2803 At the end of the upgrade, the repository may not be readable while new
2805 At the end of the upgrade, the repository may not be readable while new
2804 repository data is swapped in. This window will be as long as it takes to
2806 repository data is swapped in. This window will be as long as it takes to
2805 rename some directories inside the ``.hg`` directory. On most machines, this
2807 rename some directories inside the ``.hg`` directory. On most machines, this
2806 should complete almost instantaneously and the chances of a consumer being
2808 should complete almost instantaneously and the chances of a consumer being
2807 unable to access the repository should be low.
2809 unable to access the repository should be low.
2808 """
2810 """
2809 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize,
2811 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize,
2810 backup=backup)
2812 backup=backup)
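# A hedged example: ``hg debugupgraderepo`` alone only reports what an
# upgrade would change, while ``hg debugupgraderepo --run`` performs it;
# the boolean ``backup`` flag above should be negatable as ``--no-backup``
# to skip keeping the old store content (assumption based on the flag's
# True default).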
2811
2813
2812 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2814 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2813 inferrepo=True)
2815 inferrepo=True)
2814 def debugwalk(ui, repo, *pats, **opts):
2816 def debugwalk(ui, repo, *pats, **opts):
2815 """show how files match on given patterns"""
2817 """show how files match on given patterns"""
2816 opts = pycompat.byteskwargs(opts)
2818 opts = pycompat.byteskwargs(opts)
2817 m = scmutil.match(repo[None], pats, opts)
2819 m = scmutil.match(repo[None], pats, opts)
2818 if ui.verbose:
2820 if ui.verbose:
2819 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
2821 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
2820 items = list(repo[None].walk(m))
2822 items = list(repo[None].walk(m))
2821 if not items:
2823 if not items:
2822 return
2824 return
2823 f = lambda fn: fn
2825 f = lambda fn: fn
2824 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2826 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2825 f = lambda fn: util.normpath(fn)
2827 f = lambda fn: util.normpath(fn)
2826 fmt = 'f %%-%ds %%-%ds %%s' % (
2828 fmt = 'f %%-%ds %%-%ds %%s' % (
2827 max([len(abs) for abs in items]),
2829 max([len(abs) for abs in items]),
2828 max([len(repo.pathto(abs)) for abs in items]))
2830 max([len(repo.pathto(abs)) for abs in items]))
2829 for abs in items:
2831 for abs in items:
2830 line = fmt % (abs, f(repo.pathto(abs)), m.exact(abs) and 'exact' or '')
2832 line = fmt % (abs, f(repo.pathto(abs)), m.exact(abs) and 'exact' or '')
2831 ui.write("%s\n" % line.rstrip())
2833 ui.write("%s\n" % line.rstrip())
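# A hedged example (the pattern is illustrative):
#
#   hg debugwalk -v 'glob:**.py'
#
# shows how that pattern maps onto files in the working copy; with -v the
# constructed matcher object is printed first.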
2832
2834
2833 @command('debugwhyunstable', [], _('REV'))
2835 @command('debugwhyunstable', [], _('REV'))
2834 def debugwhyunstable(ui, repo, rev):
2836 def debugwhyunstable(ui, repo, rev):
2835 """explain instabilities of a changeset"""
2837 """explain instabilities of a changeset"""
2836 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
2838 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
2837 dnodes = ''
2839 dnodes = ''
2838 if entry.get('divergentnodes'):
2840 if entry.get('divergentnodes'):
2839 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2841 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2840 for ctx in entry['divergentnodes']) + ' '
2842 for ctx in entry['divergentnodes']) + ' '
2841 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2843 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2842 entry['reason'], entry['node']))
2844 entry['reason'], entry['node']))
2843
2845
2844 @command('debugwireargs',
2846 @command('debugwireargs',
2845 [('', 'three', '', 'three'),
2847 [('', 'three', '', 'three'),
2846 ('', 'four', '', 'four'),
2848 ('', 'four', '', 'four'),
2847 ('', 'five', '', 'five'),
2849 ('', 'five', '', 'five'),
2848 ] + cmdutil.remoteopts,
2850 ] + cmdutil.remoteopts,
2849 _('REPO [OPTIONS]... [ONE [TWO]]'),
2851 _('REPO [OPTIONS]... [ONE [TWO]]'),
2850 norepo=True)
2852 norepo=True)
2851 def debugwireargs(ui, repopath, *vals, **opts):
2853 def debugwireargs(ui, repopath, *vals, **opts):
2852 opts = pycompat.byteskwargs(opts)
2854 opts = pycompat.byteskwargs(opts)
2853 repo = hg.peer(ui, opts, repopath)
2855 repo = hg.peer(ui, opts, repopath)
2854 for opt in cmdutil.remoteopts:
2856 for opt in cmdutil.remoteopts:
2855 del opts[opt[1]]
2857 del opts[opt[1]]
2856 args = {}
2858 args = {}
2857 for k, v in opts.iteritems():
2859 for k, v in opts.iteritems():
2858 if v:
2860 if v:
2859 args[k] = v
2861 args[k] = v
2860 args = pycompat.strkwargs(args)
2862 args = pycompat.strkwargs(args)
2861 # run twice to check that we don't mess up the stream for the next command
2863 # run twice to check that we don't mess up the stream for the next command
2862 res1 = repo.debugwireargs(*vals, **args)
2864 res1 = repo.debugwireargs(*vals, **args)
2863 res2 = repo.debugwireargs(*vals, **args)
2865 res2 = repo.debugwireargs(*vals, **args)
2864 ui.write("%s\n" % res1)
2866 ui.write("%s\n" % res1)
2865 if res1 != res2:
2867 if res1 != res2:
2866 ui.warn("%s\n" % res2)
2868 ui.warn("%s\n" % res2)
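# A hedged example (the URL is illustrative): ``hg debugwireargs
# http://server/repo one two --three 3`` prints the arguments as the remote
# saw them; the call is issued twice so a mismatch would reveal a
# desynchronized stream.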
2867
2869
2868 def _parsewirelangblocks(fh):
2870 def _parsewirelangblocks(fh):
2869 activeaction = None
2871 activeaction = None
2870 blocklines = []
2872 blocklines = []
2871 lastindent = 0
2873 lastindent = 0
2872
2874
2873 for line in fh:
2875 for line in fh:
2874 line = line.rstrip()
2876 line = line.rstrip()
2875 if not line:
2877 if not line:
2876 continue
2878 continue
2877
2879
2878 if line.startswith(b'#'):
2880 if line.startswith(b'#'):
2879 continue
2881 continue
2880
2882
2881 if not line.startswith(b' '):
2883 if not line.startswith(b' '):
2882 # New block. Flush previous one.
2884 # New block. Flush previous one.
2883 if activeaction:
2885 if activeaction:
2884 yield activeaction, blocklines
2886 yield activeaction, blocklines
2885
2887
2886 activeaction = line
2888 activeaction = line
2887 blocklines = []
2889 blocklines = []
2888 lastindent = 0
2890 lastindent = 0
2889 continue
2891 continue
2890
2892
2891 # Else we start with an indent.
2893 # Else we start with an indent.
2892
2894
2893 if not activeaction:
2895 if not activeaction:
2894 raise error.Abort(_('indented line outside of block'))
2896 raise error.Abort(_('indented line outside of block'))
2895
2897
2896 indent = len(line) - len(line.lstrip())
2898 indent = len(line) - len(line.lstrip())
2897
2899
2898 # If this line is indented more than the last line, concatenate it.
2900 # If this line is indented more than the last line, concatenate it.
2899 if indent > lastindent and blocklines:
2901 if indent > lastindent and blocklines:
2900 blocklines[-1] += line.lstrip()
2902 blocklines[-1] += line.lstrip()
2901 else:
2903 else:
2902 blocklines.append(line)
2904 blocklines.append(line)
2903 lastindent = indent
2905 lastindent = indent
2904
2906
2905 # Flush last block.
2907 # Flush last block.
2906 if activeaction:
2908 if activeaction:
2907 yield activeaction, blocklines
2909 yield activeaction, blocklines
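# A minimal sketch of what this parser yields (doctest-style illustration,
# not executed here):
#
#   >>> import io
#   >>> list(_parsewirelangblocks(io.BytesIO(
#   ...     b'command listkeys\n    namespace bookmarks\n')))
#   [(b'command listkeys', [b'    namespace bookmarks'])]
#
# i.e. each unindented line opens a new (action, lines) block, indented
# lines are collected under it, and a deeper indent is concatenated onto
# the previous collected line.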
2908
2910
2909 @command('debugwireproto',
2911 @command('debugwireproto',
2910 [
2912 [
2911 ('', 'localssh', False, _('start an SSH server for this repo')),
2913 ('', 'localssh', False, _('start an SSH server for this repo')),
2912 ('', 'peer', '', _('construct a specific version of the peer')),
2914 ('', 'peer', '', _('construct a specific version of the peer')),
2913 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2915 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2914 ('', 'nologhandshake', False,
2916 ('', 'nologhandshake', False,
2915 _('do not log I/O related to the peer handshake')),
2917 _('do not log I/O related to the peer handshake')),
2916 ] + cmdutil.remoteopts,
2918 ] + cmdutil.remoteopts,
2917 _('[PATH]'),
2919 _('[PATH]'),
2918 optionalrepo=True)
2920 optionalrepo=True)
2919 def debugwireproto(ui, repo, path=None, **opts):
2921 def debugwireproto(ui, repo, path=None, **opts):
2920 """send wire protocol commands to a server
2922 """send wire protocol commands to a server
2921
2923
2922 This command can be used to issue wire protocol commands to remote
2924 This command can be used to issue wire protocol commands to remote
2923 peers and to debug the raw data being exchanged.
2925 peers and to debug the raw data being exchanged.
2924
2926
2925 ``--localssh`` will start an SSH server against the current repository
2927 ``--localssh`` will start an SSH server against the current repository
2926 and connect to that. By default, the connection will perform a handshake
2928 and connect to that. By default, the connection will perform a handshake
2927 and establish an appropriate peer instance.
2929 and establish an appropriate peer instance.
2928
2930
2929 ``--peer`` can be used to bypass the handshake protocol and construct a
2931 ``--peer`` can be used to bypass the handshake protocol and construct a
2930 peer instance using the specified class type. Valid values are ``raw``,
2932 peer instance using the specified class type. Valid values are ``raw``,
2931 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2933 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2932 raw data payloads and don't support higher-level command actions.
2934 raw data payloads and don't support higher-level command actions.
2933
2935
2934 ``--noreadstderr`` can be used to disable automatic reading from stderr
2936 ``--noreadstderr`` can be used to disable automatic reading from stderr
2935 of the peer (for SSH connections only). Disabling automatic reading of
2937 of the peer (for SSH connections only). Disabling automatic reading of
2936 stderr is useful for making output more deterministic.
2938 stderr is useful for making output more deterministic.
2937
2939
2938 Commands are issued via a mini language supplied on stdin.
2940 Commands are issued via a mini language supplied on stdin.
2939 The language consists of individual actions to perform. An action is
2941 The language consists of individual actions to perform. An action is
2940 defined by a block. A block is defined as a line with no leading
2942 defined by a block. A block is defined as a line with no leading
2941 space followed by 0 or more lines with leading space. Blocks are
2943 space followed by 0 or more lines with leading space. Blocks are
2942 effectively a high-level command with additional metadata.
2944 effectively a high-level command with additional metadata.
2943
2945
2944 Lines beginning with ``#`` are ignored.
2946 Lines beginning with ``#`` are ignored.
2945
2947
2946 The following sections denote available actions.
2948 The following sections denote available actions.
2947
2949
2948 raw
2950 raw
2949 ---
2951 ---
2950
2952
2951 Send raw data to the server.
2953 Send raw data to the server.
2952
2954
2953 The block payload contains the raw data to send as one atomic send
2955 The block payload contains the raw data to send as one atomic send
2954 operation. The data may not actually be delivered in a single system
2956 operation. The data may not actually be delivered in a single system
2955 call: it depends on the abilities of the transport being used.
2957 call: it depends on the abilities of the transport being used.
2956
2958
2957 Each line in the block is de-indented and concatenated. Then, that
2959 Each line in the block is de-indented and concatenated. Then, that
2958 value is evaluated as a Python b'' literal. This allows the use of
2960 value is evaluated as a Python b'' literal. This allows the use of
2959 backslash escaping, etc.
2961 backslash escaping, etc.
2960
2962
2961 raw+
2963 raw+
2962 ----
2964 ----
2963
2965
2964 Behaves like ``raw`` except flushes output afterwards.
2966 Behaves like ``raw`` except flushes output afterwards.
2965
2967
2966 command <X>
2968 command <X>
2967 -----------
2969 -----------
2968
2970
2969 Send a request to run a named command, whose name follows the ``command``
2971 Send a request to run a named command, whose name follows the ``command``
2970 string.
2972 string.
2971
2973
2972 Arguments to the command are defined as lines in this block. The format of
2974 Arguments to the command are defined as lines in this block. The format of
2973 each line is ``<key> <value>``. e.g.::
2975 each line is ``<key> <value>``. e.g.::
2974
2976
2975 command listkeys
2977 command listkeys
2976 namespace bookmarks
2978 namespace bookmarks
2977
2979
2978 If the value begins with ``eval:``, it will be interpreted as a Python
2980 If the value begins with ``eval:``, it will be interpreted as a Python
2979 literal expression. Otherwise values are interpreted as Python b'' literals.
2981 literal expression. Otherwise values are interpreted as Python b'' literals.
2980 This allows sending complex types and encoding special byte sequences via
2982 This allows sending complex types and encoding special byte sequences via
2981 backslash escaping.
2983 backslash escaping.
2982
2984
2983 The following arguments have special meaning:
2985 The following arguments have special meaning:
2984
2986
2985 ``PUSHFILE``
2987 ``PUSHFILE``
2986 When defined, the *push* mechanism of the peer will be used instead
2988 When defined, the *push* mechanism of the peer will be used instead
2987 of the static request-response mechanism and the content of the
2989 of the static request-response mechanism and the content of the
2988 file specified in the value of this argument will be sent as the
2990 file specified in the value of this argument will be sent as the
2989 command payload.
2991 command payload.
2990
2992
2991 This can be used to submit a local bundle file to the remote.
2993 This can be used to submit a local bundle file to the remote.
2992
2994
2993 batchbegin
2995 batchbegin
2994 ----------
2996 ----------
2995
2997
2996 Instruct the peer to begin a batched send.
2998 Instruct the peer to begin a batched send.
2997
2999
2998 All ``command`` blocks are queued for execution until the next
3000 All ``command`` blocks are queued for execution until the next
2999 ``batchsubmit`` block.
3001 ``batchsubmit`` block.
3000
3002
3001 batchsubmit
3003 batchsubmit
3002 -----------
3004 -----------
3003
3005
3004 Submit previously queued ``command`` blocks as a batch request.
3006 Submit previously queued ``command`` blocks as a batch request.
3005
3007
3006 This action MUST be paired with a ``batchbegin`` action.
3008 This action MUST be paired with a ``batchbegin`` action.
3007
3009
3008 httprequest <method> <path>
3010 httprequest <method> <path>
3009 ---------------------------
3011 ---------------------------
3010
3012
3011 (HTTP peer only)
3013 (HTTP peer only)
3012
3014
3013 Send an HTTP request to the peer.
3015 Send an HTTP request to the peer.
3014
3016
3015 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
3017 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
3016
3018
3017 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
3019 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
3018 headers to add to the request. e.g. ``Accept: foo``.
3020 headers to add to the request. e.g. ``Accept: foo``.
3019
3021
3020 The following arguments are special:
3022 The following arguments are special:
3021
3023
3022 ``BODYFILE``
3024 ``BODYFILE``
3023 The content of the file defined as the value to this argument will be
3025 The content of the file defined as the value to this argument will be
3024 transferred verbatim as the HTTP request body.
3026 transferred verbatim as the HTTP request body.
3025
3027
3026 ``frame <type> <flags> <payload>``
3028 ``frame <type> <flags> <payload>``
3027 Send a unified protocol frame as part of the request body.
3029 Send a unified protocol frame as part of the request body.
3028
3030
3029 All frames will be collected and sent as the body to the HTTP
3031 All frames will be collected and sent as the body to the HTTP
3030 request.
3032 request.
3031
3033
3032 close
3034 close
3033 -----
3035 -----
3034
3036
3035 Close the connection to the server.
3037 Close the connection to the server.
3036
3038
3037 flush
3039 flush
3038 -----
3040 -----
3039
3041
3040 Flush data written to the server.
3042 Flush data written to the server.
3041
3043
3042 readavailable
3044 readavailable
3043 -------------
3045 -------------
3044
3046
3045 Close the write end of the connection and read all available data from
3047 Close the write end of the connection and read all available data from
3046 the server.
3048 the server.
3047
3049
3048 If the connection to the server encompasses multiple pipes, we poll both
3050 If the connection to the server encompasses multiple pipes, we poll both
3049 pipes and read available data.
3051 pipes and read available data.
3050
3052
3051 readline
3053 readline
3052 --------
3054 --------
3053
3055
3054 Read a line of output from the server. If there are multiple output
3056 Read a line of output from the server. If there are multiple output
3055 pipes, reads only the main pipe.
3057 pipes, reads only the main pipe.
3056
3058
3057 ereadline
3059 ereadline
3058 ---------
3060 ---------
3059
3061
3060 Like ``readline``, but read from the stderr pipe, if available.
3062 Like ``readline``, but read from the stderr pipe, if available.
3061
3063
3062 read <X>
3064 read <X>
3063 --------
3065 --------
3064
3066
3065 ``read()`` N bytes from the server's main output pipe.
3067 ``read()`` N bytes from the server's main output pipe.
3066
3068
3067 eread <X>
3069 eread <X>
3068 ---------
3070 ---------
3069
3071
3070 ``read()`` N bytes from the server's stderr pipe, if available.
3072 ``read()`` N bytes from the server's stderr pipe, if available.
3071
3073
3072 Specifying Unified Frame-Based Protocol Frames
3074 Specifying Unified Frame-Based Protocol Frames
3073 ----------------------------------------------
3075 ----------------------------------------------
3074
3076
3075 It is possible to emit a *Unified Frame-Based Protocol* by using special
3077 It is possible to emit a *Unified Frame-Based Protocol* by using special
3076 syntax.
3078 syntax.
3077
3079
3078 A frame is composed of a type, flags, and payload. These can be parsed
3080 A frame is composed of a type, flags, and payload. These can be parsed
3079 from a string of the form:
3081 from a string of the form:
3080
3082
3081 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
3083 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
3082
3084
3083 ``request-id`` and ``stream-id`` are integers defining the request and
3085 ``request-id`` and ``stream-id`` are integers defining the request and
3084 stream identifiers.
3086 stream identifiers.
3085
3087
3086 ``type`` can be an integer value for the frame type or the string name
3088 ``type`` can be an integer value for the frame type or the string name
3087 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
3089 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
3088 ``command-name``.
3090 ``command-name``.
3089
3091
3090 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
3092 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
3091 components. Each component (and there can be just one) can be an integer
3093 components. Each component (and there can be just one) can be an integer
3092 or a flag name for stream flags or frame flags, respectively. Values are
3094 or a flag name for stream flags or frame flags, respectively. Values are
3093 resolved to integers and then bitwise OR'd together.
3095 resolved to integers and then bitwise OR'd together.
3094
3096
3095 ``payload`` represents the raw frame payload. If it begins with
3097 ``payload`` represents the raw frame payload. If it begins with
3096 ``cbor:``, the following string is evaluated as Python code and the
3098 ``cbor:``, the following string is evaluated as Python code and the
3097 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3099 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3098 as a Python byte string literal.
3100 as a Python byte string literal.
3099 """
3101 """
3100 opts = pycompat.byteskwargs(opts)
3102 opts = pycompat.byteskwargs(opts)
3101
3103
3102 if opts['localssh'] and not repo:
3104 if opts['localssh'] and not repo:
3103 raise error.Abort(_('--localssh requires a repository'))
3105 raise error.Abort(_('--localssh requires a repository'))
3104
3106
3105 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
3107 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
3106 raise error.Abort(_('invalid value for --peer'),
3108 raise error.Abort(_('invalid value for --peer'),
3107 hint=_('valid values are "raw", "http2", "ssh1", and "ssh2"'))
3109 hint=_('valid values are "raw", "http2", "ssh1", and "ssh2"'))
3108
3110
3109 if path and opts['localssh']:
3111 if path and opts['localssh']:
3110 raise error.Abort(_('cannot specify --localssh with an explicit '
3112 raise error.Abort(_('cannot specify --localssh with an explicit '
3111 'path'))
3113 'path'))
3112
3114
3113 if ui.interactive():
3115 if ui.interactive():
3114 ui.write(_('(waiting for commands on stdin)\n'))
3116 ui.write(_('(waiting for commands on stdin)\n'))
3115
3117
3116 blocks = list(_parsewirelangblocks(ui.fin))
3118 blocks = list(_parsewirelangblocks(ui.fin))
3117
3119
3118 proc = None
3120 proc = None
3119 stdin = None
3121 stdin = None
3120 stdout = None
3122 stdout = None
3121 stderr = None
3123 stderr = None
3122 opener = None
3124 opener = None
3123
3125
3124 if opts['localssh']:
3126 if opts['localssh']:
3125 # We start the SSH server in its own process so there is process
3127 # We start the SSH server in its own process so there is process
3126 # separation. This prevents a whole class of potential bugs around
3128 # separation. This prevents a whole class of potential bugs around
3127 # shared state from interfering with server operation.
3129 # shared state from interfering with server operation.
3128 args = procutil.hgcmd() + [
3130 args = procutil.hgcmd() + [
3129 '-R', repo.root,
3131 '-R', repo.root,
3130 'debugserve', '--sshstdio',
3132 'debugserve', '--sshstdio',
3131 ]
3133 ]
3132 proc = subprocess.Popen(pycompat.rapply(procutil.tonativestr, args),
3134 proc = subprocess.Popen(pycompat.rapply(procutil.tonativestr, args),
3133 stdin=subprocess.PIPE,
3135 stdin=subprocess.PIPE,
3134 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
3136 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
3135 bufsize=0)
3137 bufsize=0)
3136
3138
3137 stdin = proc.stdin
3139 stdin = proc.stdin
3138 stdout = proc.stdout
3140 stdout = proc.stdout
3139 stderr = proc.stderr
3141 stderr = proc.stderr
3140
3142
3141 # We turn the pipes into observers so we can log I/O.
3143 # We turn the pipes into observers so we can log I/O.
3142 if ui.verbose or opts['peer'] == 'raw':
3144 if ui.verbose or opts['peer'] == 'raw':
3143 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
3145 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
3144 logdata=True)
3146 logdata=True)
3145 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
3147 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
3146 logdata=True)
3148 logdata=True)
3147 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
3149 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
3148 logdata=True)
3150 logdata=True)
3149
3151
3150 # --localssh also implies the peer connection settings.
3152 # --localssh also implies the peer connection settings.
3151
3153
3152 url = 'ssh://localserver'
3154 url = 'ssh://localserver'
3153 autoreadstderr = not opts['noreadstderr']
3155 autoreadstderr = not opts['noreadstderr']
3154
3156
3155 if opts['peer'] == 'ssh1':
3157 if opts['peer'] == 'ssh1':
3156 ui.write(_('creating ssh peer for wire protocol version 1\n'))
3158 ui.write(_('creating ssh peer for wire protocol version 1\n'))
3157 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
3159 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
3158 None, autoreadstderr=autoreadstderr)
3160 None, autoreadstderr=autoreadstderr)
3159 elif opts['peer'] == 'ssh2':
3161 elif opts['peer'] == 'ssh2':
3160 ui.write(_('creating ssh peer for wire protocol version 2\n'))
3162 ui.write(_('creating ssh peer for wire protocol version 2\n'))
3161 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
3163 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
3162 None, autoreadstderr=autoreadstderr)
3164 None, autoreadstderr=autoreadstderr)
3163 elif opts['peer'] == 'raw':
3165 elif opts['peer'] == 'raw':
3164 ui.write(_('using raw connection to peer\n'))
3166 ui.write(_('using raw connection to peer\n'))
3165 peer = None
3167 peer = None
3166 else:
3168 else:
3167 ui.write(_('creating ssh peer from handshake results\n'))
3169 ui.write(_('creating ssh peer from handshake results\n'))
3168 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
3170 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
3169 autoreadstderr=autoreadstderr)
3171 autoreadstderr=autoreadstderr)
3170
3172
3171 elif path:
3173 elif path:
3172 # We bypass hg.peer() so we can proxy the sockets.
3174 # We bypass hg.peer() so we can proxy the sockets.
3173 # TODO consider not doing this because we skip
3175 # TODO consider not doing this because we skip
3174 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
3176 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
3175 u = util.url(path)
3177 u = util.url(path)
3176 if u.scheme != 'http':
3178 if u.scheme != 'http':
3177 raise error.Abort(_('only http:// paths are currently supported'))
3179 raise error.Abort(_('only http:// paths are currently supported'))
3178
3180
3179 url, authinfo = u.authinfo()
3181 url, authinfo = u.authinfo()
3180 openerargs = {
3182 openerargs = {
3181 r'useragent': b'Mercurial debugwireproto',
3183 r'useragent': b'Mercurial debugwireproto',
3182 }
3184 }
3183
3185
3184 # Turn pipes/sockets into observers so we can log I/O.
3186 # Turn pipes/sockets into observers so we can log I/O.
3185 if ui.verbose:
3187 if ui.verbose:
3186 openerargs.update({
3188 openerargs.update({
3187 r'loggingfh': ui,
3189 r'loggingfh': ui,
3188 r'loggingname': b's',
3190 r'loggingname': b's',
3189 r'loggingopts': {
3191 r'loggingopts': {
3190 r'logdata': True,
3192 r'logdata': True,
3191 r'logdataapis': False,
3193 r'logdataapis': False,
3192 },
3194 },
3193 })
3195 })
3194
3196
3195 if ui.debugflag:
3197 if ui.debugflag:
3196 openerargs[r'loggingopts'][r'logdataapis'] = True
3198 openerargs[r'loggingopts'][r'logdataapis'] = True
3197
3199
3198 # Don't send default headers when in raw mode. This allows us to
3200 # Don't send default headers when in raw mode. This allows us to
3199 # bypass most of the behavior of our URL handling code so we can
3201 # bypass most of the behavior of our URL handling code so we can
3200 # have near complete control over what's sent on the wire.
3202 # have near complete control over what's sent on the wire.
3201 if opts['peer'] == 'raw':
3203 if opts['peer'] == 'raw':
3202 openerargs[r'sendaccept'] = False
3204 openerargs[r'sendaccept'] = False
3203
3205
3204 opener = urlmod.opener(ui, authinfo, **openerargs)
3206 opener = urlmod.opener(ui, authinfo, **openerargs)
3205
3207
3206 if opts['peer'] == 'http2':
3208 if opts['peer'] == 'http2':
3207 ui.write(_('creating http peer for wire protocol version 2\n'))
3209 ui.write(_('creating http peer for wire protocol version 2\n'))
3208 # We go through makepeer() because we need an API descriptor for
3210 # We go through makepeer() because we need an API descriptor for
3209 # the peer instance to be useful.
3211 # the peer instance to be useful.
3210 with ui.configoverride({
3212 with ui.configoverride({
3211 ('experimental', 'httppeer.advertise-v2'): True}):
3213 ('experimental', 'httppeer.advertise-v2'): True}):
3212 if opts['nologhandshake']:
3214 if opts['nologhandshake']:
3213 ui.pushbuffer()
3215 ui.pushbuffer()
3214
3216
3215 peer = httppeer.makepeer(ui, path, opener=opener)
3217 peer = httppeer.makepeer(ui, path, opener=opener)
3216
3218
3217 if opts['nologhandshake']:
3219 if opts['nologhandshake']:
3218 ui.popbuffer()
3220 ui.popbuffer()
3219
3221
3220 if not isinstance(peer, httppeer.httpv2peer):
3222 if not isinstance(peer, httppeer.httpv2peer):
3221 raise error.Abort(_('could not instantiate HTTP peer for '
3223 raise error.Abort(_('could not instantiate HTTP peer for '
3222 'wire protocol version 2'),
3224 'wire protocol version 2'),
3223 hint=_('the server may not have the feature '
3225 hint=_('the server may not have the feature '
3224 'enabled or is not allowing this '
3226 'enabled or is not allowing this '
3225 'client version'))
3227 'client version'))
3226
3228
3227 elif opts['peer'] == 'raw':
3229 elif opts['peer'] == 'raw':
3228 ui.write(_('using raw connection to peer\n'))
3230 ui.write(_('using raw connection to peer\n'))
3229 peer = None
3231 peer = None
3230 elif opts['peer']:
3232 elif opts['peer']:
3231 raise error.Abort(_('--peer %s not supported with HTTP peers') %
3233 raise error.Abort(_('--peer %s not supported with HTTP peers') %
3232 opts['peer'])
3234 opts['peer'])
3233 else:
3235 else:
3234 peer = httppeer.makepeer(ui, path, opener=opener)
3236 peer = httppeer.makepeer(ui, path, opener=opener)
3235
3237
3236 # We /could/ populate stdin/stdout with sock.makefile()...
3238 # We /could/ populate stdin/stdout with sock.makefile()...
3237 else:
3239 else:
3238 raise error.Abort(_('unsupported connection configuration'))
3240 raise error.Abort(_('unsupported connection configuration'))
3239
3241
3240 batchedcommands = None
3242 batchedcommands = None
3241
3243
3242 # Now perform actions based on the parsed wire language instructions.
3244 # Now perform actions based on the parsed wire language instructions.
3243 for action, lines in blocks:
3245 for action, lines in blocks:
3244 if action in ('raw', 'raw+'):
3246 if action in ('raw', 'raw+'):
3245 if not stdin:
3247 if not stdin:
3246 raise error.Abort(_('cannot call raw/raw+ on this peer'))
3248 raise error.Abort(_('cannot call raw/raw+ on this peer'))
3247
3249
3248 # Concatenate the data together.
3250 # Concatenate the data together.
3249 data = ''.join(l.lstrip() for l in lines)
3251 data = ''.join(l.lstrip() for l in lines)
3250 data = stringutil.unescapestr(data)
3252 data = stringutil.unescapestr(data)
3251 stdin.write(data)
3253 stdin.write(data)
3252
3254
3253 if action == 'raw+':
3255 if action == 'raw+':
3254 stdin.flush()
3256 stdin.flush()
3255 elif action == 'flush':
3257 elif action == 'flush':
3256 if not stdin:
3258 if not stdin:
3257 raise error.Abort(_('cannot call flush on this peer'))
3259 raise error.Abort(_('cannot call flush on this peer'))
3258 stdin.flush()
3260 stdin.flush()
3259 elif action.startswith('command'):
3261 elif action.startswith('command'):
3260 if not peer:
3262 if not peer:
3261 raise error.Abort(_('cannot send commands unless peer instance '
3263 raise error.Abort(_('cannot send commands unless peer instance '
3262 'is available'))
3264 'is available'))
3263
3265
3264 command = action.split(' ', 1)[1]
3266 command = action.split(' ', 1)[1]
3265
3267
3266 args = {}
3268 args = {}
3267 for line in lines:
3269 for line in lines:
3268 # We need to allow empty values.
3270 # We need to allow empty values.
3269 fields = line.lstrip().split(' ', 1)
3271 fields = line.lstrip().split(' ', 1)
3270 if len(fields) == 1:
3272 if len(fields) == 1:
3271 key = fields[0]
3273 key = fields[0]
3272 value = ''
3274 value = ''
3273 else:
3275 else:
3274 key, value = fields
3276 key, value = fields
3275
3277
3276 if value.startswith('eval:'):
3278 if value.startswith('eval:'):
3277 value = stringutil.evalpythonliteral(value[5:])
3279 value = stringutil.evalpythonliteral(value[5:])
3278 else:
3280 else:
3279 value = stringutil.unescapestr(value)
3281 value = stringutil.unescapestr(value)
3280
3282
3281 args[key] = value
3283 args[key] = value
3282
3284
3283 if batchedcommands is not None:
3285 if batchedcommands is not None:
3284 batchedcommands.append((command, args))
3286 batchedcommands.append((command, args))
3285 continue
3287 continue
3286
3288
3287 ui.status(_('sending %s command\n') % command)
3289 ui.status(_('sending %s command\n') % command)
3288
3290
3289 if 'PUSHFILE' in args:
3291 if 'PUSHFILE' in args:
3290 with open(args['PUSHFILE'], r'rb') as fh:
3292 with open(args['PUSHFILE'], r'rb') as fh:
3291 del args['PUSHFILE']
3293 del args['PUSHFILE']
3292 res, output = peer._callpush(command, fh,
3294 res, output = peer._callpush(command, fh,
3293 **pycompat.strkwargs(args))
3295 **pycompat.strkwargs(args))
3294 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3296 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3295 ui.status(_('remote output: %s\n') %
3297 ui.status(_('remote output: %s\n') %
3296 stringutil.escapestr(output))
3298 stringutil.escapestr(output))
3297 else:
3299 else:
3298 with peer.commandexecutor() as e:
3300 with peer.commandexecutor() as e:
3299 res = e.callcommand(command, args).result()
3301 res = e.callcommand(command, args).result()
3300
3302
3301 if isinstance(res, wireprotov2peer.commandresponse):
3303 if isinstance(res, wireprotov2peer.commandresponse):
3302 val = res.objects()
3304 val = res.objects()
3303 ui.status(_('response: %s\n') %
3305 ui.status(_('response: %s\n') %
3304 stringutil.pprint(val, bprefix=True, indent=2))
3306 stringutil.pprint(val, bprefix=True, indent=2))
3305 else:
3307 else:
3306 ui.status(_('response: %s\n') %
3308 ui.status(_('response: %s\n') %
3307 stringutil.pprint(res, bprefix=True, indent=2))
3309 stringutil.pprint(res, bprefix=True, indent=2))
3308
3310
3309 elif action == 'batchbegin':
3311 elif action == 'batchbegin':
3310 if batchedcommands is not None:
3312 if batchedcommands is not None:
3311 raise error.Abort(_('nested batchbegin not allowed'))
3313 raise error.Abort(_('nested batchbegin not allowed'))
3312
3314
3313 batchedcommands = []
3315 batchedcommands = []
3314 elif action == 'batchsubmit':
3316 elif action == 'batchsubmit':
3315 # There is a batching API we could go through. But it would be
3317 # There is a batching API we could go through. But it would be
3316 # difficult to normalize requests into function calls. It is easier
3318 # difficult to normalize requests into function calls. It is easier
3317 # to bypass this layer and normalize to commands + args.
3319 # to bypass this layer and normalize to commands + args.
3318 ui.status(_('sending batch with %d sub-commands\n') %
3320 ui.status(_('sending batch with %d sub-commands\n') %
3319 len(batchedcommands))
3321 len(batchedcommands))
3320 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3322 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3321 ui.status(_('response #%d: %s\n') %
3323 ui.status(_('response #%d: %s\n') %
3322 (i, stringutil.escapestr(chunk)))
3324 (i, stringutil.escapestr(chunk)))
3323
3325
3324 batchedcommands = None
3326 batchedcommands = None
3325
3327
3326 elif action.startswith('httprequest '):
3328 elif action.startswith('httprequest '):
3327 if not opener:
3329 if not opener:
3328 raise error.Abort(_('cannot use httprequest without an HTTP '
3330 raise error.Abort(_('cannot use httprequest without an HTTP '
3329 'peer'))
3331 'peer'))
3330
3332
3331 request = action.split(' ', 2)
3333 request = action.split(' ', 2)
3332 if len(request) != 3:
3334 if len(request) != 3:
3333 raise error.Abort(_('invalid httprequest: expected format is '
3335 raise error.Abort(_('invalid httprequest: expected format is '
3334 '"httprequest <method> <path>'))
3336 '"httprequest <method> <path>'))
3335
3337
3336 method, httppath = request[1:]
3338 method, httppath = request[1:]
3337 headers = {}
3339 headers = {}
3338 body = None
3340 body = None
3339 frames = []
3341 frames = []
3340 for line in lines:
3342 for line in lines:
3341 line = line.lstrip()
3343 line = line.lstrip()
3342 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3344 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3343 if m:
3345 if m:
3344 # Headers need to use native strings.
3346 # Headers need to use native strings.
3345 key = pycompat.strurl(m.group(1))
3347 key = pycompat.strurl(m.group(1))
3346 value = pycompat.strurl(m.group(2))
3348 value = pycompat.strurl(m.group(2))
3347 headers[key] = value
3349 headers[key] = value
3348 continue
3350 continue
3349
3351
3350 if line.startswith(b'BODYFILE '):
3352 if line.startswith(b'BODYFILE '):
3351 with open(line.split(b' ', 1)[1], 'rb') as fh:
3353 with open(line.split(b' ', 1)[1], 'rb') as fh:
3352 body = fh.read()
3354 body = fh.read()
3353 elif line.startswith(b'frame '):
3355 elif line.startswith(b'frame '):
3354 frame = wireprotoframing.makeframefromhumanstring(
3356 frame = wireprotoframing.makeframefromhumanstring(
3355 line[len(b'frame '):])
3357 line[len(b'frame '):])
3356
3358
3357 frames.append(frame)
3359 frames.append(frame)
3358 else:
3360 else:
3359 raise error.Abort(_('unknown argument to httprequest: %s') %
3361 raise error.Abort(_('unknown argument to httprequest: %s') %
3360 line)
3362 line)
3361
3363
3362 url = path + httppath
3364 url = path + httppath
3363
3365
3364 if frames:
3366 if frames:
3365 body = b''.join(bytes(f) for f in frames)
3367 body = b''.join(bytes(f) for f in frames)
3366
3368
3367 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3369 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3368
3370
3369 # urllib.Request insists on using has_data() as a proxy for
3371 # urllib.Request insists on using has_data() as a proxy for
3370 # determining the request method. Override that to use our
3372 # determining the request method. Override that to use our
3371 # explicitly requested method.
3373 # explicitly requested method.
3372 req.get_method = lambda: pycompat.sysstr(method)
3374 req.get_method = lambda: pycompat.sysstr(method)
3373
3375
3374 try:
3376 try:
3375 res = opener.open(req)
3377 res = opener.open(req)
3376 body = res.read()
3378 body = res.read()
3377 except util.urlerr.urlerror as e:
3379 except util.urlerr.urlerror as e:
3378 # read() method must be called, but only exists in Python 2
3380 # read() method must be called, but only exists in Python 2
3379 getattr(e, 'read', lambda: None)()
3381 getattr(e, 'read', lambda: None)()
3380 continue
3382 continue
3381
3383
3382 ct = res.headers.get(r'Content-Type')
3384 ct = res.headers.get(r'Content-Type')
3383 if ct == r'application/mercurial-cbor':
3385 if ct == r'application/mercurial-cbor':
3384 ui.write(_('cbor> %s\n') %
3386 ui.write(_('cbor> %s\n') %
3385 stringutil.pprint(cborutil.decodeall(body),
3387 stringutil.pprint(cborutil.decodeall(body),
3386 bprefix=True,
3388 bprefix=True,
3387 indent=2))
3389 indent=2))
3388
3390
3389 elif action == 'close':
3391 elif action == 'close':
3390 peer.close()
3392 peer.close()
3391 elif action == 'readavailable':
3393 elif action == 'readavailable':
3392 if not stdout or not stderr:
3394 if not stdout or not stderr:
3393 raise error.Abort(_('readavailable not available on this peer'))
3395 raise error.Abort(_('readavailable not available on this peer'))
3394
3396
3395 stdin.close()
3397 stdin.close()
3396 stdout.read()
3398 stdout.read()
3397 stderr.read()
3399 stderr.read()
3398
3400
3399 elif action == 'readline':
3401 elif action == 'readline':
3400 if not stdout:
3402 if not stdout:
3401 raise error.Abort(_('readline not available on this peer'))
3403 raise error.Abort(_('readline not available on this peer'))
3402 stdout.readline()
3404 stdout.readline()
3403 elif action == 'ereadline':
3405 elif action == 'ereadline':
3404 if not stderr:
3406 if not stderr:
3405 raise error.Abort(_('ereadline not available on this peer'))
3407 raise error.Abort(_('ereadline not available on this peer'))
3406 stderr.readline()
3408 stderr.readline()
3407 elif action.startswith('read '):
3409 elif action.startswith('read '):
3408 count = int(action.split(' ', 1)[1])
3410 count = int(action.split(' ', 1)[1])
3409 if not stdout:
3411 if not stdout:
3410 raise error.Abort(_('read not available on this peer'))
3412 raise error.Abort(_('read not available on this peer'))
3411 stdout.read(count)
3413 stdout.read(count)
3412 elif action.startswith('eread '):
3414 elif action.startswith('eread '):
3413 count = int(action.split(' ', 1)[1])
3415 count = int(action.split(' ', 1)[1])
3414 if not stderr:
3416 if not stderr:
3415 raise error.Abort(_('eread not available on this peer'))
3417 raise error.Abort(_('eread not available on this peer'))
3416 stderr.read(count)
3418 stderr.read(count)
3417 else:
3419 else:
3418 raise error.Abort(_('unknown action: %s') % action)
3420 raise error.Abort(_('unknown action: %s') % action)
3419
3421
3420 if batchedcommands is not None:
3422 if batchedcommands is not None:
3421 raise error.Abort(_('unclosed "batchbegin" request'))
3423 raise error.Abort(_('unclosed "batchbegin" request'))
3422
3424
3423 if peer:
3425 if peer:
3424 peer.close()
3426 peer.close()
3425
3427
3426 if proc:
3428 if proc:
3427 proc.kill()
3429 proc.kill()
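The httprequest action above dispatches each argument line by its shape: a 'name: value' line becomes an HTTP header (converted to a native string), a 'BODYFILE <path>' line loads the request body from a file, and 'frame <spec>' lines are assembled into wire-protocol frames whose concatenation replaces the body. Below is a minimal standalone sketch of that dispatch; parse_httprequest_args() is a hypothetical helper, since the real command keeps this logic inline and builds frames with wireprotoframing.makeframefromhumanstring().

import re

HEADER_RE = re.compile(br'^([a-zA-Z0-9_-]+): (.*)$')

def parse_httprequest_args(lines):
    # Sketch only: mirrors the dispatch above, not part of debugcommands.py.
    headers = {}
    body = None
    frames = []
    for line in lines:
        m = HEADER_RE.match(line)
        if m:
            # headers must be native strings for the urllib request object
            headers[m.group(1).decode('ascii')] = m.group(2).decode('ascii')
            continue
        if line.startswith(b'BODYFILE '):
            with open(line.split(b' ', 1)[1], 'rb') as fh:
                body = fh.read()
        elif line.startswith(b'frame '):
            # real code: wireprotoframing.makeframefromhumanstring(...)
            frames.append(line[len(b'frame '):])
        else:
            raise ValueError('unknown argument to httprequest: %r' % line)
    if frames:
        # frames, when present, take precedence over any BODYFILE body
        body = b''.join(frames)
    return headers, body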
@@ -1,150 +1,160 b''
1 Source bundle was generated with the following script:
1 Source bundle was generated with the following script:
2
2
3 # hg init
3 # hg init
4 # echo a > a
4 # echo a > a
5 # ln -s a l
5 # ln -s a l
6 # hg ci -Ama -d'0 0'
6 # hg ci -Ama -d'0 0'
7 # mkdir b
7 # mkdir b
8 # echo a > b/a
8 # echo a > b/a
9 # chmod +x b/a
9 # chmod +x b/a
10 # hg ci -Amb -d'1 0'
10 # hg ci -Amb -d'1 0'
11
11
12 $ hg init
12 $ hg init
13 $ hg unbundle "$TESTDIR/bundles/test-manifest.hg"
13 $ hg unbundle "$TESTDIR/bundles/test-manifest.hg"
14 adding changesets
14 adding changesets
15 adding manifests
15 adding manifests
16 adding file changes
16 adding file changes
17 added 2 changesets with 3 changes to 3 files
17 added 2 changesets with 3 changes to 3 files
18 new changesets b73562a03cfe:5bdc995175ba (2 drafts)
18 new changesets b73562a03cfe:5bdc995175ba (2 drafts)
19 (run 'hg update' to get a working copy)
19 (run 'hg update' to get a working copy)
20
20
21 The next call is expected to return nothing:
21 The next call is expected to return nothing:
22
22
23 $ hg manifest
23 $ hg manifest
24
24
25 $ hg co
25 $ hg co
26 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
26 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
27
27
28 $ hg manifest
28 $ hg manifest
29 a
29 a
30 b/a
30 b/a
31 l
31 l
32
32
33 $ hg files -vr .
33 $ hg files -vr .
34 2 a
34 2 a
35 2 x b/a
35 2 x b/a
36 1 l l
36 1 l l
37 $ hg files -r . -X b
37 $ hg files -r . -X b
38 a
38 a
39 l
39 l
40 $ hg files -T '{path} {size} {flags}\n'
40 $ hg files -T '{path} {size} {flags}\n'
41 a 2
41 a 2
42 b/a 2 x
42 b/a 2 x
43 l 1 l
43 l 1 l
44 $ hg files -T '{path} {node|shortest}\n' -r.
44 $ hg files -T '{path} {node|shortest}\n' -r.
45 a 5bdc
45 a 5bdc
46 b/a 5bdc
46 b/a 5bdc
47 l 5bdc
47 l 5bdc
48
48
49 $ hg manifest -v
49 $ hg manifest -v
50 644 a
50 644 a
51 755 * b/a
51 755 * b/a
52 644 @ l
52 644 @ l
53 $ hg manifest -T '{path} {rev}\n'
53 $ hg manifest -T '{path} {rev}\n'
54 a 1
54 a 1
55 b/a 1
55 b/a 1
56 l 1
56 l 1
57
57
58 $ hg manifest --debug
58 $ hg manifest --debug
59 b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 644 a
59 b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 644 a
60 b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 755 * b/a
60 b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 755 * b/a
61 047b75c6d7a3ef6a2243bd0e99f94f6ea6683597 644 @ l
61 047b75c6d7a3ef6a2243bd0e99f94f6ea6683597 644 @ l
62
62
63 $ hg manifest -r 0
63 $ hg manifest -r 0
64 a
64 a
65 l
65 l
66
66
67 $ hg manifest -r 1
67 $ hg manifest -r 1
68 a
68 a
69 b/a
69 b/a
70 l
70 l
71
71
72 $ hg manifest -r tip
72 $ hg manifest -r tip
73 a
73 a
74 b/a
74 b/a
75 l
75 l
76
76
77 $ hg manifest tip
77 $ hg manifest tip
78 a
78 a
79 b/a
79 b/a
80 l
80 l
81
81
82 $ hg manifest --all
82 $ hg manifest --all
83 a
83 a
84 b/a
84 b/a
85 l
85 l
86
86
87 The next two calls are expected to abort:
87 The next two calls are expected to abort:
88
88
89 $ hg manifest -r 2
89 $ hg manifest -r 2
90 abort: unknown revision '2'!
90 abort: unknown revision '2'!
91 [255]
91 [255]
92
92
93 $ hg manifest -r tip tip
93 $ hg manifest -r tip tip
94 abort: please specify just one revision
94 abort: please specify just one revision
95 [255]
95 [255]
96
96
97 Testing the manifest full text cache utility
97 Testing the manifest full text cache utility
98 --------------------------------------------
98 --------------------------------------------
99
99
100 Reminder of the manifest log content
100 Reminder of the manifest log content
101
101
102 $ hg log --debug | grep 'manifest:'
102 $ hg log --debug | grep 'manifest:'
103 manifest: 1:1e01206b1d2f72bd55f2a33fa8ccad74144825b7
103 manifest: 1:1e01206b1d2f72bd55f2a33fa8ccad74144825b7
104 manifest: 0:fce2a30dedad1eef4da95ca1dc0004157aa527cf
104 manifest: 0:fce2a30dedad1eef4da95ca1dc0004157aa527cf
105
105
106 Showing the content of the cache after the above operations
106 Showing the content of the cache after the above operations
107
107
108 $ hg debugmanifestfulltextcache
108 $ hg debugmanifestfulltextcache
109 cache empty
109 cache empty
110
110
111 Adding a new persistent entry in the cache
111 Adding a new persistent entry in the cache
112
112
113 $ hg debugmanifestfulltextcache --add 1e01206b1d2f72bd55f2a33fa8ccad74144825b7
113 $ hg debugmanifestfulltextcache --add 1e01206b1d2f72bd55f2a33fa8ccad74144825b7
114
114
115 $ hg debugmanifestfulltextcache
115 $ hg debugmanifestfulltextcache
116 cache contains 1 manifest entries, in order of most to least recent:
116 cache contains 1 manifest entries, in order of most to least recent:
117 id: 1e01206b1d2f72bd55f2a33fa8ccad74144825b7, size 133 bytes
117 id: 1e01206b1d2f72bd55f2a33fa8ccad74144825b7, size 133 bytes
118 total cache data size 157 bytes, on-disk 157 bytes
118 total cache data size 157 bytes, on-disk 157 bytes
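The totals reported here are consistent with each cache entry carrying 24 bytes of bookkeeping on top of the manifest text, assuming the on-disk format prefixes every entry with a 20-byte node id and a 4-byte length; that assumption matches every size printed in this transcript, as the quick check below shows.

# Sanity check of the reported sizes; PER_ENTRY_OVERHEAD is an assumption
# (20-byte node id + 4-byte length prefix), not taken from the transcript.
PER_ENTRY_OVERHEAD = 20 + 4

def cache_size(entry_sizes):
    return sum(size + PER_ENTRY_OVERHEAD for size in entry_sizes)

assert cache_size([133]) == 157        # the single entry above
assert cache_size([133, 87]) == 268    # the two-entry listings further down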
119
119
120 Check we don't duplicate the entry (added from the debug command)
120 Check we don't duplicate the entry (added from the debug command)
121
121
122 $ hg debugmanifestfulltextcache --add 1e01206b1d2f72bd55f2a33fa8ccad74144825b7
122 $ hg debugmanifestfulltextcache --add 1e01206b1d2f72bd55f2a33fa8ccad74144825b7
123 $ hg debugmanifestfulltextcache
123 $ hg debugmanifestfulltextcache
124 cache contains 1 manifest entries, in order of most to least recent:
124 cache contains 1 manifest entries, in order of most to least recent:
125 id: 1e01206b1d2f72bd55f2a33fa8ccad74144825b7, size 133 bytes
125 id: 1e01206b1d2f72bd55f2a33fa8ccad74144825b7, size 133 bytes
126 total cache data size 157 bytes, on-disk 157 bytes
126 total cache data size 157 bytes, on-disk 157 bytes
127
127
128 Adding a second entry
128 Adding a second entry
129
129
130 $ hg debugmanifestfulltextcache --add fce2a30dedad1eef4da95ca1dc0004157aa527cf
130 $ hg debugmanifestfulltextcache --add fce2a30dedad1eef4da95ca1dc0004157aa527cf
131 $ hg debugmanifestfulltextcache
131 $ hg debugmanifestfulltextcache
132 cache contains 2 manifest entries, in order of most to least recent:
132 cache contains 2 manifest entries, in order of most to least recent:
133 id: fce2a30dedad1eef4da95ca1dc0004157aa527cf, size 87 bytes
133 id: fce2a30dedad1eef4da95ca1dc0004157aa527cf, size 87 bytes
134 id: 1e01206b1d2f72bd55f2a33fa8ccad74144825b7, size 133 bytes
134 id: 1e01206b1d2f72bd55f2a33fa8ccad74144825b7, size 133 bytes
135 total cache data size 268 bytes, on-disk 268 bytes
135 total cache data size 268 bytes, on-disk 268 bytes
136
136
137 Accessing the initial entry again refreshes its order
137 Accessing the initial entry again refreshes its order
138
138
139 $ hg debugmanifestfulltextcache --add 1e01206b1d2f72bd55f2a33fa8ccad74144825b7
139 $ hg debugmanifestfulltextcache --add 1e01206b1d2f72bd55f2a33fa8ccad74144825b7
140 $ hg debugmanifestfulltextcache
140 $ hg debugmanifestfulltextcache
141 cache contains 2 manifest entries, in order of most to least recent:
141 cache contains 2 manifest entries, in order of most to least recent:
142 id: 1e01206b1d2f72bd55f2a33fa8ccad74144825b7, size 133 bytes
142 id: 1e01206b1d2f72bd55f2a33fa8ccad74144825b7, size 133 bytes
143 id: fce2a30dedad1eef4da95ca1dc0004157aa527cf, size 87 bytes
143 id: fce2a30dedad1eef4da95ca1dc0004157aa527cf, size 87 bytes
144 total cache data size 268 bytes, on-disk 268 bytes
144 total cache data size 268 bytes, on-disk 268 bytes
145
145
146 Check cache clearing
146 Check cache clearing
147
147
148 $ hg debugmanifestfulltextcache --clear
148 $ hg debugmanifestfulltextcache --clear
149 $ hg debugmanifestfulltextcache
149 $ hg debugmanifestfulltextcache
150 cache empty
150 cache empty
151
152 Check adding multiple entries in one go:
153
154 $ hg debugmanifestfulltextcache --add fce2a30dedad1eef4da95ca1dc0004157aa527cf --add 1e01206b1d2f72bd55f2a33fa8ccad74144825b7
155 $ hg debugmanifestfulltextcache
156 cache contains 2 manifest entries, in order of most to least recent:
157 id: 1e01206b1d2f72bd55f2a33fa8ccad74144825b7, size 133 bytes
158 id: fce2a30dedad1eef4da95ca1dc0004157aa527cf, size 87 bytes
159 total cache data size 268 bytes, on-disk 268 bytes
160 $ hg debugmanifestfulltextcache --clear
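The ordering exercised throughout this transcript is that of an LRU structure keyed by manifest node: adding a node that is already cached never creates a duplicate, it only moves that entry to the most-recent position, and with multiple --add flags the last node added ends up most recent. The following is a minimal ordering-only sketch; Mercurial's manifestfulltextcache additionally enforces a size limit and persists entries under .hg/cache, which this does not model.

from collections import OrderedDict

class LRUSketch(object):
    # Ordering-only model of the manifest fulltext cache used above.
    def __init__(self):
        self._entries = OrderedDict()      # node -> manifest text

    def add(self, node, text):
        self._entries[node] = text         # no duplicates: keys are unique
        self._entries.move_to_end(node, last=False)  # most recent first

    def order(self):
        return list(self._entries)         # most to least recent

cache = LRUSketch()
cache.add('fce2a30d', 'x' * 87)
cache.add('1e01206b', 'y' * 133)
assert cache.order() == ['1e01206b', 'fce2a30d']
cache.add('fce2a30d', 'x' * 87)            # re-adding only refreshes its position
assert cache.order() == ['fce2a30d', '1e01206b']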