debugcommands: use openstorage() in debugdata (BC)...
Gregory Szorc
r39317:dd6bc250 default
@@ -1,3325 +1,3325 @@
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import difflib
12 import difflib
13 import errno
13 import errno
14 import operator
14 import operator
15 import os
15 import os
16 import random
16 import random
17 import re
17 import re
18 import socket
18 import socket
19 import ssl
19 import ssl
20 import stat
20 import stat
21 import string
21 import string
22 import subprocess
22 import subprocess
23 import sys
23 import sys
24 import time
24 import time
25
25
26 from .i18n import _
26 from .i18n import _
27 from .node import (
27 from .node import (
28 bin,
28 bin,
29 hex,
29 hex,
30 nullhex,
30 nullhex,
31 nullid,
31 nullid,
32 nullrev,
32 nullrev,
33 short,
33 short,
34 )
34 )
35 from .thirdparty import (
35 from .thirdparty import (
36 cbor,
36 cbor,
37 )
37 )
38 from . import (
38 from . import (
39 bundle2,
39 bundle2,
40 changegroup,
40 changegroup,
41 cmdutil,
41 cmdutil,
42 color,
42 color,
43 context,
43 context,
44 dagparser,
44 dagparser,
45 encoding,
45 encoding,
46 error,
46 error,
47 exchange,
47 exchange,
48 extensions,
48 extensions,
49 filemerge,
49 filemerge,
50 filesetlang,
50 filesetlang,
51 formatter,
51 formatter,
52 hg,
52 hg,
53 httppeer,
53 httppeer,
54 localrepo,
54 localrepo,
55 lock as lockmod,
55 lock as lockmod,
56 logcmdutil,
56 logcmdutil,
57 merge as mergemod,
57 merge as mergemod,
58 obsolete,
58 obsolete,
59 obsutil,
59 obsutil,
60 phases,
60 phases,
61 policy,
61 policy,
62 pvec,
62 pvec,
63 pycompat,
63 pycompat,
64 registrar,
64 registrar,
65 repair,
65 repair,
66 revlog,
66 revlog,
67 revset,
67 revset,
68 revsetlang,
68 revsetlang,
69 scmutil,
69 scmutil,
70 setdiscovery,
70 setdiscovery,
71 simplemerge,
71 simplemerge,
72 sshpeer,
72 sshpeer,
73 sslutil,
73 sslutil,
74 streamclone,
74 streamclone,
75 templater,
75 templater,
76 treediscovery,
76 treediscovery,
77 upgrade,
77 upgrade,
78 url as urlmod,
78 url as urlmod,
79 util,
79 util,
80 vfs as vfsmod,
80 vfs as vfsmod,
81 wireprotoframing,
81 wireprotoframing,
82 wireprotoserver,
82 wireprotoserver,
83 wireprotov2peer,
83 wireprotov2peer,
84 )
84 )
85 from .utils import (
85 from .utils import (
86 dateutil,
86 dateutil,
87 procutil,
87 procutil,
88 stringutil,
88 stringutil,
89 )
89 )
90
90
91 release = lockmod.release
91 release = lockmod.release
92
92
93 command = registrar.command()
93 command = registrar.command()
94
94
95 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
95 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
96 def debugancestor(ui, repo, *args):
96 def debugancestor(ui, repo, *args):
97 """find the ancestor revision of two revisions in a given index"""
97 """find the ancestor revision of two revisions in a given index"""
98 if len(args) == 3:
98 if len(args) == 3:
99 index, rev1, rev2 = args
99 index, rev1, rev2 = args
100 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
100 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
101 lookup = r.lookup
101 lookup = r.lookup
102 elif len(args) == 2:
102 elif len(args) == 2:
103 if not repo:
103 if not repo:
104 raise error.Abort(_('there is no Mercurial repository here '
104 raise error.Abort(_('there is no Mercurial repository here '
105 '(.hg not found)'))
105 '(.hg not found)'))
106 rev1, rev2 = args
106 rev1, rev2 = args
107 r = repo.changelog
107 r = repo.changelog
108 lookup = repo.lookup
108 lookup = repo.lookup
109 else:
109 else:
110 raise error.Abort(_('either two or three arguments required'))
110 raise error.Abort(_('either two or three arguments required'))
111 a = r.ancestor(lookup(rev1), lookup(rev2))
111 a = r.ancestor(lookup(rev1), lookup(rev2))
112 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
112 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
113
113
114 @command('debugapplystreamclonebundle', [], 'FILE')
114 @command('debugapplystreamclonebundle', [], 'FILE')
115 def debugapplystreamclonebundle(ui, repo, fname):
115 def debugapplystreamclonebundle(ui, repo, fname):
116 """apply a stream clone bundle file"""
116 """apply a stream clone bundle file"""
117 f = hg.openpath(ui, fname)
117 f = hg.openpath(ui, fname)
118 gen = exchange.readbundle(ui, f, fname)
118 gen = exchange.readbundle(ui, f, fname)
119 gen.apply(repo)
119 gen.apply(repo)
120
120
121 @command('debugbuilddag',
121 @command('debugbuilddag',
122 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
122 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
123 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
123 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
124 ('n', 'new-file', None, _('add new file at each rev'))],
124 ('n', 'new-file', None, _('add new file at each rev'))],
125 _('[OPTION]... [TEXT]'))
125 _('[OPTION]... [TEXT]'))
126 def debugbuilddag(ui, repo, text=None,
126 def debugbuilddag(ui, repo, text=None,
127 mergeable_file=False,
127 mergeable_file=False,
128 overwritten_file=False,
128 overwritten_file=False,
129 new_file=False):
129 new_file=False):
130 """builds a repo with a given DAG from scratch in the current empty repo
130 """builds a repo with a given DAG from scratch in the current empty repo
131
131
132 The description of the DAG is read from stdin if not given on the
132 The description of the DAG is read from stdin if not given on the
133 command line.
133 command line.
134
134
135 Elements:
135 Elements:
136
136
137 - "+n" is a linear run of n nodes based on the current default parent
137 - "+n" is a linear run of n nodes based on the current default parent
138 - "." is a single node based on the current default parent
138 - "." is a single node based on the current default parent
139 - "$" resets the default parent to null (implied at the start);
139 - "$" resets the default parent to null (implied at the start);
140 otherwise the default parent is always the last node created
140 otherwise the default parent is always the last node created
141 - "<p" sets the default parent to the backref p
141 - "<p" sets the default parent to the backref p
142 - "*p" is a fork at parent p, which is a backref
142 - "*p" is a fork at parent p, which is a backref
143 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
143 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
144 - "/p2" is a merge of the preceding node and p2
144 - "/p2" is a merge of the preceding node and p2
145 - ":tag" defines a local tag for the preceding node
145 - ":tag" defines a local tag for the preceding node
146 - "@branch" sets the named branch for subsequent nodes
146 - "@branch" sets the named branch for subsequent nodes
147 - "#...\\n" is a comment up to the end of the line
147 - "#...\\n" is a comment up to the end of the line
148
148
149 Whitespace between the above elements is ignored.
149 Whitespace between the above elements is ignored.
150
150
151 A backref is either
151 A backref is either
152
152
153 - a number n, which references the node curr-n, where curr is the current
153 - a number n, which references the node curr-n, where curr is the current
154 node, or
154 node, or
155 - the name of a local tag you placed earlier using ":tag", or
155 - the name of a local tag you placed earlier using ":tag", or
156 - empty to denote the default parent.
156 - empty to denote the default parent.
157
157
158 All string-valued elements are either strictly alphanumeric, or must
158 All string-valued elements are either strictly alphanumeric, or must
159 be enclosed in double quotes ("..."), with "\\" as escape character.
159 be enclosed in double quotes ("..."), with "\\" as escape character.
160 """
160 """
161
161
162 if text is None:
162 if text is None:
163 ui.status(_("reading DAG from stdin\n"))
163 ui.status(_("reading DAG from stdin\n"))
164 text = ui.fin.read()
164 text = ui.fin.read()
165
165
166 cl = repo.changelog
166 cl = repo.changelog
167 if len(cl) > 0:
167 if len(cl) > 0:
168 raise error.Abort(_('repository is not empty'))
168 raise error.Abort(_('repository is not empty'))
169
169
170 # determine number of revs in DAG
170 # determine number of revs in DAG
171 total = 0
171 total = 0
172 for type, data in dagparser.parsedag(text):
172 for type, data in dagparser.parsedag(text):
173 if type == 'n':
173 if type == 'n':
174 total += 1
174 total += 1
175
175
176 if mergeable_file:
176 if mergeable_file:
177 linesperrev = 2
177 linesperrev = 2
178 # make a file with k lines per rev
178 # make a file with k lines per rev
179 initialmergedlines = ['%d' % i
179 initialmergedlines = ['%d' % i
180 for i in pycompat.xrange(0, total * linesperrev)]
180 for i in pycompat.xrange(0, total * linesperrev)]
181 initialmergedlines.append("")
181 initialmergedlines.append("")
182
182
183 tags = []
183 tags = []
184 progress = ui.makeprogress(_('building'), unit=_('revisions'),
184 progress = ui.makeprogress(_('building'), unit=_('revisions'),
185 total=total)
185 total=total)
186 with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
186 with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
187 at = -1
187 at = -1
188 atbranch = 'default'
188 atbranch = 'default'
189 nodeids = []
189 nodeids = []
190 id = 0
190 id = 0
191 progress.update(id)
191 progress.update(id)
192 for type, data in dagparser.parsedag(text):
192 for type, data in dagparser.parsedag(text):
193 if type == 'n':
193 if type == 'n':
194 ui.note(('node %s\n' % pycompat.bytestr(data)))
194 ui.note(('node %s\n' % pycompat.bytestr(data)))
195 id, ps = data
195 id, ps = data
196
196
197 files = []
197 files = []
198 filecontent = {}
198 filecontent = {}
199
199
200 p2 = None
200 p2 = None
201 if mergeable_file:
201 if mergeable_file:
202 fn = "mf"
202 fn = "mf"
203 p1 = repo[ps[0]]
203 p1 = repo[ps[0]]
204 if len(ps) > 1:
204 if len(ps) > 1:
205 p2 = repo[ps[1]]
205 p2 = repo[ps[1]]
206 pa = p1.ancestor(p2)
206 pa = p1.ancestor(p2)
207 base, local, other = [x[fn].data() for x in (pa, p1,
207 base, local, other = [x[fn].data() for x in (pa, p1,
208 p2)]
208 p2)]
209 m3 = simplemerge.Merge3Text(base, local, other)
209 m3 = simplemerge.Merge3Text(base, local, other)
210 ml = [l.strip() for l in m3.merge_lines()]
210 ml = [l.strip() for l in m3.merge_lines()]
211 ml.append("")
211 ml.append("")
212 elif at > 0:
212 elif at > 0:
213 ml = p1[fn].data().split("\n")
213 ml = p1[fn].data().split("\n")
214 else:
214 else:
215 ml = initialmergedlines
215 ml = initialmergedlines
216 ml[id * linesperrev] += " r%i" % id
216 ml[id * linesperrev] += " r%i" % id
217 mergedtext = "\n".join(ml)
217 mergedtext = "\n".join(ml)
218 files.append(fn)
218 files.append(fn)
219 filecontent[fn] = mergedtext
219 filecontent[fn] = mergedtext
220
220
221 if overwritten_file:
221 if overwritten_file:
222 fn = "of"
222 fn = "of"
223 files.append(fn)
223 files.append(fn)
224 filecontent[fn] = "r%i\n" % id
224 filecontent[fn] = "r%i\n" % id
225
225
226 if new_file:
226 if new_file:
227 fn = "nf%i" % id
227 fn = "nf%i" % id
228 files.append(fn)
228 files.append(fn)
229 filecontent[fn] = "r%i\n" % id
229 filecontent[fn] = "r%i\n" % id
230 if len(ps) > 1:
230 if len(ps) > 1:
231 if not p2:
231 if not p2:
232 p2 = repo[ps[1]]
232 p2 = repo[ps[1]]
233 for fn in p2:
233 for fn in p2:
234 if fn.startswith("nf"):
234 if fn.startswith("nf"):
235 files.append(fn)
235 files.append(fn)
236 filecontent[fn] = p2[fn].data()
236 filecontent[fn] = p2[fn].data()
237
237
238 def fctxfn(repo, cx, path):
238 def fctxfn(repo, cx, path):
239 if path in filecontent:
239 if path in filecontent:
240 return context.memfilectx(repo, cx, path,
240 return context.memfilectx(repo, cx, path,
241 filecontent[path])
241 filecontent[path])
242 return None
242 return None
243
243
244 if len(ps) == 0 or ps[0] < 0:
244 if len(ps) == 0 or ps[0] < 0:
245 pars = [None, None]
245 pars = [None, None]
246 elif len(ps) == 1:
246 elif len(ps) == 1:
247 pars = [nodeids[ps[0]], None]
247 pars = [nodeids[ps[0]], None]
248 else:
248 else:
249 pars = [nodeids[p] for p in ps]
249 pars = [nodeids[p] for p in ps]
250 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
250 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
251 date=(id, 0),
251 date=(id, 0),
252 user="debugbuilddag",
252 user="debugbuilddag",
253 extra={'branch': atbranch})
253 extra={'branch': atbranch})
254 nodeid = repo.commitctx(cx)
254 nodeid = repo.commitctx(cx)
255 nodeids.append(nodeid)
255 nodeids.append(nodeid)
256 at = id
256 at = id
257 elif type == 'l':
257 elif type == 'l':
258 id, name = data
258 id, name = data
259 ui.note(('tag %s\n' % name))
259 ui.note(('tag %s\n' % name))
260 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
260 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
261 elif type == 'a':
261 elif type == 'a':
262 ui.note(('branch %s\n' % data))
262 ui.note(('branch %s\n' % data))
263 atbranch = data
263 atbranch = data
264 progress.update(id)
264 progress.update(id)
265
265
266 if tags:
266 if tags:
267 repo.vfs.write("localtags", "".join(tags))
267 repo.vfs.write("localtags", "".join(tags))
268
268
269 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
269 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
270 indent_string = ' ' * indent
270 indent_string = ' ' * indent
271 if all:
271 if all:
272 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
272 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
273 % indent_string)
273 % indent_string)
274
274
275 def showchunks(named):
275 def showchunks(named):
276 ui.write("\n%s%s\n" % (indent_string, named))
276 ui.write("\n%s%s\n" % (indent_string, named))
277 for deltadata in gen.deltaiter():
277 for deltadata in gen.deltaiter():
278 node, p1, p2, cs, deltabase, delta, flags = deltadata
278 node, p1, p2, cs, deltabase, delta, flags = deltadata
279 ui.write("%s%s %s %s %s %s %d\n" %
279 ui.write("%s%s %s %s %s %s %d\n" %
280 (indent_string, hex(node), hex(p1), hex(p2),
280 (indent_string, hex(node), hex(p1), hex(p2),
281 hex(cs), hex(deltabase), len(delta)))
281 hex(cs), hex(deltabase), len(delta)))
282
282
283 chunkdata = gen.changelogheader()
283 chunkdata = gen.changelogheader()
284 showchunks("changelog")
284 showchunks("changelog")
285 chunkdata = gen.manifestheader()
285 chunkdata = gen.manifestheader()
286 showchunks("manifest")
286 showchunks("manifest")
287 for chunkdata in iter(gen.filelogheader, {}):
287 for chunkdata in iter(gen.filelogheader, {}):
288 fname = chunkdata['filename']
288 fname = chunkdata['filename']
289 showchunks(fname)
289 showchunks(fname)
290 else:
290 else:
291 if isinstance(gen, bundle2.unbundle20):
291 if isinstance(gen, bundle2.unbundle20):
292 raise error.Abort(_('use debugbundle2 for this file'))
292 raise error.Abort(_('use debugbundle2 for this file'))
293 chunkdata = gen.changelogheader()
293 chunkdata = gen.changelogheader()
294 for deltadata in gen.deltaiter():
294 for deltadata in gen.deltaiter():
295 node, p1, p2, cs, deltabase, delta, flags = deltadata
295 node, p1, p2, cs, deltabase, delta, flags = deltadata
296 ui.write("%s%s\n" % (indent_string, hex(node)))
296 ui.write("%s%s\n" % (indent_string, hex(node)))
297
297
298 def _debugobsmarkers(ui, part, indent=0, **opts):
298 def _debugobsmarkers(ui, part, indent=0, **opts):
299 """display version and markers contained in 'data'"""
299 """display version and markers contained in 'data'"""
300 opts = pycompat.byteskwargs(opts)
300 opts = pycompat.byteskwargs(opts)
301 data = part.read()
301 data = part.read()
302 indent_string = ' ' * indent
302 indent_string = ' ' * indent
303 try:
303 try:
304 version, markers = obsolete._readmarkers(data)
304 version, markers = obsolete._readmarkers(data)
305 except error.UnknownVersion as exc:
305 except error.UnknownVersion as exc:
306 msg = "%sunsupported version: %s (%d bytes)\n"
306 msg = "%sunsupported version: %s (%d bytes)\n"
307 msg %= indent_string, exc.version, len(data)
307 msg %= indent_string, exc.version, len(data)
308 ui.write(msg)
308 ui.write(msg)
309 else:
309 else:
310 msg = "%sversion: %d (%d bytes)\n"
310 msg = "%sversion: %d (%d bytes)\n"
311 msg %= indent_string, version, len(data)
311 msg %= indent_string, version, len(data)
312 ui.write(msg)
312 ui.write(msg)
313 fm = ui.formatter('debugobsolete', opts)
313 fm = ui.formatter('debugobsolete', opts)
314 for rawmarker in sorted(markers):
314 for rawmarker in sorted(markers):
315 m = obsutil.marker(None, rawmarker)
315 m = obsutil.marker(None, rawmarker)
316 fm.startitem()
316 fm.startitem()
317 fm.plain(indent_string)
317 fm.plain(indent_string)
318 cmdutil.showmarker(fm, m)
318 cmdutil.showmarker(fm, m)
319 fm.end()
319 fm.end()
320
320
321 def _debugphaseheads(ui, data, indent=0):
321 def _debugphaseheads(ui, data, indent=0):
322 """display version and markers contained in 'data'"""
322 """display version and markers contained in 'data'"""
323 indent_string = ' ' * indent
323 indent_string = ' ' * indent
324 headsbyphase = phases.binarydecode(data)
324 headsbyphase = phases.binarydecode(data)
325 for phase in phases.allphases:
325 for phase in phases.allphases:
326 for head in headsbyphase[phase]:
326 for head in headsbyphase[phase]:
327 ui.write(indent_string)
327 ui.write(indent_string)
328 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
328 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
329
329
330 def _quasirepr(thing):
330 def _quasirepr(thing):
331 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
331 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
332 return '{%s}' % (
332 return '{%s}' % (
333 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
333 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
334 return pycompat.bytestr(repr(thing))
334 return pycompat.bytestr(repr(thing))
335
335
336 def _debugbundle2(ui, gen, all=None, **opts):
336 def _debugbundle2(ui, gen, all=None, **opts):
337 """lists the contents of a bundle2"""
337 """lists the contents of a bundle2"""
338 if not isinstance(gen, bundle2.unbundle20):
338 if not isinstance(gen, bundle2.unbundle20):
339 raise error.Abort(_('not a bundle2 file'))
339 raise error.Abort(_('not a bundle2 file'))
340 ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
340 ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
341 parttypes = opts.get(r'part_type', [])
341 parttypes = opts.get(r'part_type', [])
342 for part in gen.iterparts():
342 for part in gen.iterparts():
343 if parttypes and part.type not in parttypes:
343 if parttypes and part.type not in parttypes:
344 continue
344 continue
345 msg = '%s -- %s (mandatory: %r)\n'
345 msg = '%s -- %s (mandatory: %r)\n'
346 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
346 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
347 if part.type == 'changegroup':
347 if part.type == 'changegroup':
348 version = part.params.get('version', '01')
348 version = part.params.get('version', '01')
349 cg = changegroup.getunbundler(version, part, 'UN')
349 cg = changegroup.getunbundler(version, part, 'UN')
350 if not ui.quiet:
350 if not ui.quiet:
351 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
351 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
352 if part.type == 'obsmarkers':
352 if part.type == 'obsmarkers':
353 if not ui.quiet:
353 if not ui.quiet:
354 _debugobsmarkers(ui, part, indent=4, **opts)
354 _debugobsmarkers(ui, part, indent=4, **opts)
355 if part.type == 'phase-heads':
355 if part.type == 'phase-heads':
356 if not ui.quiet:
356 if not ui.quiet:
357 _debugphaseheads(ui, part, indent=4)
357 _debugphaseheads(ui, part, indent=4)
358
358
359 @command('debugbundle',
359 @command('debugbundle',
360 [('a', 'all', None, _('show all details')),
360 [('a', 'all', None, _('show all details')),
361 ('', 'part-type', [], _('show only the named part type')),
361 ('', 'part-type', [], _('show only the named part type')),
362 ('', 'spec', None, _('print the bundlespec of the bundle'))],
362 ('', 'spec', None, _('print the bundlespec of the bundle'))],
363 _('FILE'),
363 _('FILE'),
364 norepo=True)
364 norepo=True)
365 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
365 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
366 """lists the contents of a bundle"""
366 """lists the contents of a bundle"""
367 with hg.openpath(ui, bundlepath) as f:
367 with hg.openpath(ui, bundlepath) as f:
368 if spec:
368 if spec:
369 spec = exchange.getbundlespec(ui, f)
369 spec = exchange.getbundlespec(ui, f)
370 ui.write('%s\n' % spec)
370 ui.write('%s\n' % spec)
371 return
371 return
372
372
373 gen = exchange.readbundle(ui, f, bundlepath)
373 gen = exchange.readbundle(ui, f, bundlepath)
374 if isinstance(gen, bundle2.unbundle20):
374 if isinstance(gen, bundle2.unbundle20):
375 return _debugbundle2(ui, gen, all=all, **opts)
375 return _debugbundle2(ui, gen, all=all, **opts)
376 _debugchangegroup(ui, gen, all=all, **opts)
376 _debugchangegroup(ui, gen, all=all, **opts)
377
377
378 @command('debugcapabilities',
378 @command('debugcapabilities',
379 [], _('PATH'),
379 [], _('PATH'),
380 norepo=True)
380 norepo=True)
381 def debugcapabilities(ui, path, **opts):
381 def debugcapabilities(ui, path, **opts):
382 """lists the capabilities of a remote peer"""
382 """lists the capabilities of a remote peer"""
383 opts = pycompat.byteskwargs(opts)
383 opts = pycompat.byteskwargs(opts)
384 peer = hg.peer(ui, opts, path)
384 peer = hg.peer(ui, opts, path)
385 caps = peer.capabilities()
385 caps = peer.capabilities()
386 ui.write(('Main capabilities:\n'))
386 ui.write(('Main capabilities:\n'))
387 for c in sorted(caps):
387 for c in sorted(caps):
388 ui.write((' %s\n') % c)
388 ui.write((' %s\n') % c)
389 b2caps = bundle2.bundle2caps(peer)
389 b2caps = bundle2.bundle2caps(peer)
390 if b2caps:
390 if b2caps:
391 ui.write(('Bundle2 capabilities:\n'))
391 ui.write(('Bundle2 capabilities:\n'))
392 for key, values in sorted(b2caps.iteritems()):
392 for key, values in sorted(b2caps.iteritems()):
393 ui.write((' %s\n') % key)
393 ui.write((' %s\n') % key)
394 for v in values:
394 for v in values:
395 ui.write((' %s\n') % v)
395 ui.write((' %s\n') % v)
396
396
397 @command('debugcheckstate', [], '')
397 @command('debugcheckstate', [], '')
398 def debugcheckstate(ui, repo):
398 def debugcheckstate(ui, repo):
399 """validate the correctness of the current dirstate"""
399 """validate the correctness of the current dirstate"""
400 parent1, parent2 = repo.dirstate.parents()
400 parent1, parent2 = repo.dirstate.parents()
401 m1 = repo[parent1].manifest()
401 m1 = repo[parent1].manifest()
402 m2 = repo[parent2].manifest()
402 m2 = repo[parent2].manifest()
403 errors = 0
403 errors = 0
404 for f in repo.dirstate:
404 for f in repo.dirstate:
405 state = repo.dirstate[f]
405 state = repo.dirstate[f]
406 if state in "nr" and f not in m1:
406 if state in "nr" and f not in m1:
407 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
407 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
408 errors += 1
408 errors += 1
409 if state in "a" and f in m1:
409 if state in "a" and f in m1:
410 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
410 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
411 errors += 1
411 errors += 1
412 if state in "m" and f not in m1 and f not in m2:
412 if state in "m" and f not in m1 and f not in m2:
413 ui.warn(_("%s in state %s, but not in either manifest\n") %
413 ui.warn(_("%s in state %s, but not in either manifest\n") %
414 (f, state))
414 (f, state))
415 errors += 1
415 errors += 1
416 for f in m1:
416 for f in m1:
417 state = repo.dirstate[f]
417 state = repo.dirstate[f]
418 if state not in "nrm":
418 if state not in "nrm":
419 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
419 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
420 errors += 1
420 errors += 1
421 if errors:
421 if errors:
422 error = _(".hg/dirstate inconsistent with current parent's manifest")
422 error = _(".hg/dirstate inconsistent with current parent's manifest")
423 raise error.Abort(error)
423 raise error.Abort(error)
424
424
425 @command('debugcolor',
425 @command('debugcolor',
426 [('', 'style', None, _('show all configured styles'))],
426 [('', 'style', None, _('show all configured styles'))],
427 'hg debugcolor')
427 'hg debugcolor')
428 def debugcolor(ui, repo, **opts):
428 def debugcolor(ui, repo, **opts):
429 """show available color, effects or style"""
429 """show available color, effects or style"""
430 ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
430 ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
431 if opts.get(r'style'):
431 if opts.get(r'style'):
432 return _debugdisplaystyle(ui)
432 return _debugdisplaystyle(ui)
433 else:
433 else:
434 return _debugdisplaycolor(ui)
434 return _debugdisplaycolor(ui)
435
435
436 def _debugdisplaycolor(ui):
436 def _debugdisplaycolor(ui):
437 ui = ui.copy()
437 ui = ui.copy()
438 ui._styles.clear()
438 ui._styles.clear()
439 for effect in color._activeeffects(ui).keys():
439 for effect in color._activeeffects(ui).keys():
440 ui._styles[effect] = effect
440 ui._styles[effect] = effect
441 if ui._terminfoparams:
441 if ui._terminfoparams:
442 for k, v in ui.configitems('color'):
442 for k, v in ui.configitems('color'):
443 if k.startswith('color.'):
443 if k.startswith('color.'):
444 ui._styles[k] = k[6:]
444 ui._styles[k] = k[6:]
445 elif k.startswith('terminfo.'):
445 elif k.startswith('terminfo.'):
446 ui._styles[k] = k[9:]
446 ui._styles[k] = k[9:]
447 ui.write(_('available colors:\n'))
447 ui.write(_('available colors:\n'))
448 # sort labels containing '_' after the others to group the '_background' entries.
448 # sort labels containing '_' after the others to group the '_background' entries.
449 items = sorted(ui._styles.items(),
449 items = sorted(ui._styles.items(),
450 key=lambda i: ('_' in i[0], i[0], i[1]))
450 key=lambda i: ('_' in i[0], i[0], i[1]))
451 for colorname, label in items:
451 for colorname, label in items:
452 ui.write(('%s\n') % colorname, label=label)
452 ui.write(('%s\n') % colorname, label=label)
453
453
454 def _debugdisplaystyle(ui):
454 def _debugdisplaystyle(ui):
455 ui.write(_('available style:\n'))
455 ui.write(_('available style:\n'))
456 if not ui._styles:
456 if not ui._styles:
457 return
457 return
458 width = max(len(s) for s in ui._styles)
458 width = max(len(s) for s in ui._styles)
459 for label, effects in sorted(ui._styles.items()):
459 for label, effects in sorted(ui._styles.items()):
460 ui.write('%s' % label, label=label)
460 ui.write('%s' % label, label=label)
461 if effects:
461 if effects:
462 # 50
462 # 50
463 ui.write(': ')
463 ui.write(': ')
464 ui.write(' ' * (max(0, width - len(label))))
464 ui.write(' ' * (max(0, width - len(label))))
465 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
465 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
466 ui.write('\n')
466 ui.write('\n')
467
467
468 @command('debugcreatestreamclonebundle', [], 'FILE')
468 @command('debugcreatestreamclonebundle', [], 'FILE')
469 def debugcreatestreamclonebundle(ui, repo, fname):
469 def debugcreatestreamclonebundle(ui, repo, fname):
470 """create a stream clone bundle file
470 """create a stream clone bundle file
471
471
472 Stream bundles are special bundles that are essentially archives of
472 Stream bundles are special bundles that are essentially archives of
473 revlog files. They are commonly used for cloning very quickly.
473 revlog files. They are commonly used for cloning very quickly.
474 """
474 """
475 # TODO we may want to turn this into an abort when this functionality
475 # TODO we may want to turn this into an abort when this functionality
476 # is moved into `hg bundle`.
476 # is moved into `hg bundle`.
477 if phases.hassecret(repo):
477 if phases.hassecret(repo):
478 ui.warn(_('(warning: stream clone bundle will contain secret '
478 ui.warn(_('(warning: stream clone bundle will contain secret '
479 'revisions)\n'))
479 'revisions)\n'))
480
480
481 requirements, gen = streamclone.generatebundlev1(repo)
481 requirements, gen = streamclone.generatebundlev1(repo)
482 changegroup.writechunks(ui, gen, fname)
482 changegroup.writechunks(ui, gen, fname)
483
483
484 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
484 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
485
485
486 @command('debugdag',
486 @command('debugdag',
487 [('t', 'tags', None, _('use tags as labels')),
487 [('t', 'tags', None, _('use tags as labels')),
488 ('b', 'branches', None, _('annotate with branch names')),
488 ('b', 'branches', None, _('annotate with branch names')),
489 ('', 'dots', None, _('use dots for runs')),
489 ('', 'dots', None, _('use dots for runs')),
490 ('s', 'spaces', None, _('separate elements by spaces'))],
490 ('s', 'spaces', None, _('separate elements by spaces'))],
491 _('[OPTION]... [FILE [REV]...]'),
491 _('[OPTION]... [FILE [REV]...]'),
492 optionalrepo=True)
492 optionalrepo=True)
493 def debugdag(ui, repo, file_=None, *revs, **opts):
493 def debugdag(ui, repo, file_=None, *revs, **opts):
494 """format the changelog or an index DAG as a concise textual description
494 """format the changelog or an index DAG as a concise textual description
495
495
496 If you pass a revlog index, the revlog's DAG is emitted. If you list
496 If you pass a revlog index, the revlog's DAG is emitted. If you list
497 revision numbers, they get labeled in the output as rN.
497 revision numbers, they get labeled in the output as rN.
498
498
499 Otherwise, the changelog DAG of the current repo is emitted.
499 Otherwise, the changelog DAG of the current repo is emitted.
500 """
500 """
501 spaces = opts.get(r'spaces')
501 spaces = opts.get(r'spaces')
502 dots = opts.get(r'dots')
502 dots = opts.get(r'dots')
503 if file_:
503 if file_:
504 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
504 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
505 file_)
505 file_)
506 revs = set((int(r) for r in revs))
506 revs = set((int(r) for r in revs))
507 def events():
507 def events():
508 for r in rlog:
508 for r in rlog:
509 yield 'n', (r, list(p for p in rlog.parentrevs(r)
509 yield 'n', (r, list(p for p in rlog.parentrevs(r)
510 if p != -1))
510 if p != -1))
511 if r in revs:
511 if r in revs:
512 yield 'l', (r, "r%i" % r)
512 yield 'l', (r, "r%i" % r)
513 elif repo:
513 elif repo:
514 cl = repo.changelog
514 cl = repo.changelog
515 tags = opts.get(r'tags')
515 tags = opts.get(r'tags')
516 branches = opts.get(r'branches')
516 branches = opts.get(r'branches')
517 if tags:
517 if tags:
518 labels = {}
518 labels = {}
519 for l, n in repo.tags().items():
519 for l, n in repo.tags().items():
520 labels.setdefault(cl.rev(n), []).append(l)
520 labels.setdefault(cl.rev(n), []).append(l)
521 def events():
521 def events():
522 b = "default"
522 b = "default"
523 for r in cl:
523 for r in cl:
524 if branches:
524 if branches:
525 newb = cl.read(cl.node(r))[5]['branch']
525 newb = cl.read(cl.node(r))[5]['branch']
526 if newb != b:
526 if newb != b:
527 yield 'a', newb
527 yield 'a', newb
528 b = newb
528 b = newb
529 yield 'n', (r, list(p for p in cl.parentrevs(r)
529 yield 'n', (r, list(p for p in cl.parentrevs(r)
530 if p != -1))
530 if p != -1))
531 if tags:
531 if tags:
532 ls = labels.get(r)
532 ls = labels.get(r)
533 if ls:
533 if ls:
534 for l in ls:
534 for l in ls:
535 yield 'l', (r, l)
535 yield 'l', (r, l)
536 else:
536 else:
537 raise error.Abort(_('need repo for changelog dag'))
537 raise error.Abort(_('need repo for changelog dag'))
538
538
539 for line in dagparser.dagtextlines(events(),
539 for line in dagparser.dagtextlines(events(),
540 addspaces=spaces,
540 addspaces=spaces,
541 wraplabels=True,
541 wraplabels=True,
542 wrapannotations=True,
542 wrapannotations=True,
543 wrapnonlinear=dots,
543 wrapnonlinear=dots,
544 usedots=dots,
544 usedots=dots,
545 maxlinewidth=70):
545 maxlinewidth=70):
546 ui.write(line)
546 ui.write(line)
547 ui.write("\n")
547 ui.write("\n")
548
548
549 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
549 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
550 def debugdata(ui, repo, file_, rev=None, **opts):
550 def debugdata(ui, repo, file_, rev=None, **opts):
551 """dump the contents of a data file revision"""
551 """dump the contents of a data file revision"""
552 opts = pycompat.byteskwargs(opts)
552 opts = pycompat.byteskwargs(opts)
553 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
553 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
554 if rev is not None:
554 if rev is not None:
555 raise error.CommandError('debugdata', _('invalid arguments'))
555 raise error.CommandError('debugdata', _('invalid arguments'))
556 file_, rev = None, file_
556 file_, rev = None, file_
557 elif rev is None:
557 elif rev is None:
558 raise error.CommandError('debugdata', _('invalid arguments'))
558 raise error.CommandError('debugdata', _('invalid arguments'))
559 - r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
559 + r = cmdutil.openstorage(repo, 'debugdata', file_, opts)
560 try:
560 try:
561 ui.write(r.revision(r.lookup(rev), raw=True))
561 ui.write(r.revision(r.lookup(rev), raw=True))
562 except KeyError:
562 except KeyError:
563 raise error.Abort(_('invalid revision identifier %s') % rev)
563 raise error.Abort(_('invalid revision identifier %s') % rev)
564
564
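# Usage sketch (illustrative, not part of the original file): per the synopsis
# above, "hg debugdata -c 0" dumps the raw changelog entry for revision 0,
# "hg debugdata -m 0" the raw manifest, and "hg debugdata FILE REV" the stored
# data of one file revision. The switch from openrevlog() to openstorage() in
# this changeset, marked (BC) in the commit message, presumably lets the
# command address storage backends other than plain revlogs.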
565 @command('debugdate',
565 @command('debugdate',
566 [('e', 'extended', None, _('try extended date formats'))],
566 [('e', 'extended', None, _('try extended date formats'))],
567 _('[-e] DATE [RANGE]'),
567 _('[-e] DATE [RANGE]'),
568 norepo=True, optionalrepo=True)
568 norepo=True, optionalrepo=True)
569 def debugdate(ui, date, range=None, **opts):
569 def debugdate(ui, date, range=None, **opts):
570 """parse and display a date"""
570 """parse and display a date"""
571 if opts[r"extended"]:
571 if opts[r"extended"]:
572 d = dateutil.parsedate(date, util.extendeddateformats)
572 d = dateutil.parsedate(date, util.extendeddateformats)
573 else:
573 else:
574 d = dateutil.parsedate(date)
574 d = dateutil.parsedate(date)
575 ui.write(("internal: %d %d\n") % d)
575 ui.write(("internal: %d %d\n") % d)
576 ui.write(("standard: %s\n") % dateutil.datestr(d))
576 ui.write(("standard: %s\n") % dateutil.datestr(d))
577 if range:
577 if range:
578 m = dateutil.matchdate(range)
578 m = dateutil.matchdate(range)
579 ui.write(("match: %s\n") % m(d[0]))
579 ui.write(("match: %s\n") % m(d[0]))
580
580
581 @command('debugdeltachain',
581 @command('debugdeltachain',
582 cmdutil.debugrevlogopts + cmdutil.formatteropts,
582 cmdutil.debugrevlogopts + cmdutil.formatteropts,
583 _('-c|-m|FILE'),
583 _('-c|-m|FILE'),
584 optionalrepo=True)
584 optionalrepo=True)
585 def debugdeltachain(ui, repo, file_=None, **opts):
585 def debugdeltachain(ui, repo, file_=None, **opts):
586 """dump information about delta chains in a revlog
586 """dump information about delta chains in a revlog
587
587
588 Output can be templatized. Available template keywords are:
588 Output can be templatized. Available template keywords are:
589
589
590 :``rev``: revision number
590 :``rev``: revision number
591 :``chainid``: delta chain identifier (numbered by unique base)
591 :``chainid``: delta chain identifier (numbered by unique base)
592 :``chainlen``: delta chain length to this revision
592 :``chainlen``: delta chain length to this revision
593 :``prevrev``: previous revision in delta chain
593 :``prevrev``: previous revision in delta chain
594 :``deltatype``: role of delta / how it was computed
594 :``deltatype``: role of delta / how it was computed
595 :``compsize``: compressed size of revision
595 :``compsize``: compressed size of revision
596 :``uncompsize``: uncompressed size of revision
596 :``uncompsize``: uncompressed size of revision
597 :``chainsize``: total size of compressed revisions in chain
597 :``chainsize``: total size of compressed revisions in chain
598 :``chainratio``: total chain size divided by uncompressed revision size
598 :``chainratio``: total chain size divided by uncompressed revision size
599 (new delta chains typically start at ratio 2.00)
599 (new delta chains typically start at ratio 2.00)
600 :``lindist``: linear distance from base revision in delta chain to end
600 :``lindist``: linear distance from base revision in delta chain to end
601 of this revision
601 of this revision
602 :``extradist``: total size of revisions not part of this delta chain from
602 :``extradist``: total size of revisions not part of this delta chain from
603 base of delta chain to end of this revision; a measurement
603 base of delta chain to end of this revision; a measurement
604 of how much extra data we need to read/seek across to read
604 of how much extra data we need to read/seek across to read
605 the delta chain for this revision
605 the delta chain for this revision
606 :``extraratio``: extradist divided by chainsize; another representation of
606 :``extraratio``: extradist divided by chainsize; another representation of
607 how much unrelated data is needed to load this delta chain
607 how much unrelated data is needed to load this delta chain
608
608
609 If the repository is configured to use sparse reads, additional keywords
609 If the repository is configured to use sparse reads, additional keywords
610 are available:
610 are available:
611
611
612 :``readsize``: total size of data read from the disk for a revision
612 :``readsize``: total size of data read from the disk for a revision
613 (sum of the sizes of all the blocks)
613 (sum of the sizes of all the blocks)
614 :``largestblock``: size of the largest block of data read from the disk
614 :``largestblock``: size of the largest block of data read from the disk
615 :``readdensity``: density of useful bytes in the data read from the disk
615 :``readdensity``: density of useful bytes in the data read from the disk
616 :``srchunks``: in how many data hunks the whole revision would be read
616 :``srchunks``: in how many data hunks the whole revision would be read
617
617
618 The sparse read can be enabled with experimental.sparse-read = True
618 The sparse read can be enabled with experimental.sparse-read = True
619 """
619 """
620 opts = pycompat.byteskwargs(opts)
620 opts = pycompat.byteskwargs(opts)
621 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
621 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
622 index = r.index
622 index = r.index
623 start = r.start
623 start = r.start
624 length = r.length
624 length = r.length
625 generaldelta = r.version & revlog.FLAG_GENERALDELTA
625 generaldelta = r.version & revlog.FLAG_GENERALDELTA
626 withsparseread = getattr(r, '_withsparseread', False)
626 withsparseread = getattr(r, '_withsparseread', False)
627
627
628 def revinfo(rev):
628 def revinfo(rev):
629 e = index[rev]
629 e = index[rev]
630 compsize = e[1]
630 compsize = e[1]
631 uncompsize = e[2]
631 uncompsize = e[2]
632 chainsize = 0
632 chainsize = 0
633
633
634 if generaldelta:
634 if generaldelta:
635 if e[3] == e[5]:
635 if e[3] == e[5]:
636 deltatype = 'p1'
636 deltatype = 'p1'
637 elif e[3] == e[6]:
637 elif e[3] == e[6]:
638 deltatype = 'p2'
638 deltatype = 'p2'
639 elif e[3] == rev - 1:
639 elif e[3] == rev - 1:
640 deltatype = 'prev'
640 deltatype = 'prev'
641 elif e[3] == rev:
641 elif e[3] == rev:
642 deltatype = 'base'
642 deltatype = 'base'
643 else:
643 else:
644 deltatype = 'other'
644 deltatype = 'other'
645 else:
645 else:
646 if e[3] == rev:
646 if e[3] == rev:
647 deltatype = 'base'
647 deltatype = 'base'
648 else:
648 else:
649 deltatype = 'prev'
649 deltatype = 'prev'
650
650
651 chain = r._deltachain(rev)[0]
651 chain = r._deltachain(rev)[0]
652 for iterrev in chain:
652 for iterrev in chain:
653 e = index[iterrev]
653 e = index[iterrev]
654 chainsize += e[1]
654 chainsize += e[1]
655
655
656 return compsize, uncompsize, deltatype, chain, chainsize
656 return compsize, uncompsize, deltatype, chain, chainsize
657
657
658 fm = ui.formatter('debugdeltachain', opts)
658 fm = ui.formatter('debugdeltachain', opts)
659
659
660 fm.plain(' rev chain# chainlen prev delta '
660 fm.plain(' rev chain# chainlen prev delta '
661 'size rawsize chainsize ratio lindist extradist '
661 'size rawsize chainsize ratio lindist extradist '
662 'extraratio')
662 'extraratio')
663 if withsparseread:
663 if withsparseread:
664 fm.plain(' readsize largestblk rddensity srchunks')
664 fm.plain(' readsize largestblk rddensity srchunks')
665 fm.plain('\n')
665 fm.plain('\n')
666
666
667 chainbases = {}
667 chainbases = {}
668 for rev in r:
668 for rev in r:
669 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
669 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
670 chainbase = chain[0]
670 chainbase = chain[0]
671 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
671 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
672 basestart = start(chainbase)
672 basestart = start(chainbase)
673 revstart = start(rev)
673 revstart = start(rev)
674 lineardist = revstart + comp - basestart
674 lineardist = revstart + comp - basestart
675 extradist = lineardist - chainsize
675 extradist = lineardist - chainsize
676 try:
676 try:
677 prevrev = chain[-2]
677 prevrev = chain[-2]
678 except IndexError:
678 except IndexError:
679 prevrev = -1
679 prevrev = -1
680
680
681 if uncomp != 0:
681 if uncomp != 0:
682 chainratio = float(chainsize) / float(uncomp)
682 chainratio = float(chainsize) / float(uncomp)
683 else:
683 else:
684 chainratio = chainsize
684 chainratio = chainsize
685
685
686 if chainsize != 0:
686 if chainsize != 0:
687 extraratio = float(extradist) / float(chainsize)
687 extraratio = float(extradist) / float(chainsize)
688 else:
688 else:
689 extraratio = extradist
689 extraratio = extradist
690
690
691 fm.startitem()
691 fm.startitem()
692 fm.write('rev chainid chainlen prevrev deltatype compsize '
692 fm.write('rev chainid chainlen prevrev deltatype compsize '
693 'uncompsize chainsize chainratio lindist extradist '
693 'uncompsize chainsize chainratio lindist extradist '
694 'extraratio',
694 'extraratio',
695 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
695 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
696 rev, chainid, len(chain), prevrev, deltatype, comp,
696 rev, chainid, len(chain), prevrev, deltatype, comp,
697 uncomp, chainsize, chainratio, lineardist, extradist,
697 uncomp, chainsize, chainratio, lineardist, extradist,
698 extraratio,
698 extraratio,
699 rev=rev, chainid=chainid, chainlen=len(chain),
699 rev=rev, chainid=chainid, chainlen=len(chain),
700 prevrev=prevrev, deltatype=deltatype, compsize=comp,
700 prevrev=prevrev, deltatype=deltatype, compsize=comp,
701 uncompsize=uncomp, chainsize=chainsize,
701 uncompsize=uncomp, chainsize=chainsize,
702 chainratio=chainratio, lindist=lineardist,
702 chainratio=chainratio, lindist=lineardist,
703 extradist=extradist, extraratio=extraratio)
703 extradist=extradist, extraratio=extraratio)
704 if withsparseread:
704 if withsparseread:
705 readsize = 0
705 readsize = 0
706 largestblock = 0
706 largestblock = 0
707 srchunks = 0
707 srchunks = 0
708
708
709 for revschunk in revlog._slicechunk(r, chain):
709 for revschunk in revlog._slicechunk(r, chain):
710 srchunks += 1
710 srchunks += 1
711 blkend = start(revschunk[-1]) + length(revschunk[-1])
711 blkend = start(revschunk[-1]) + length(revschunk[-1])
712 blksize = blkend - start(revschunk[0])
712 blksize = blkend - start(revschunk[0])
713
713
714 readsize += blksize
714 readsize += blksize
715 if largestblock < blksize:
715 if largestblock < blksize:
716 largestblock = blksize
716 largestblock = blksize
717
717
718 if readsize:
718 if readsize:
719 readdensity = float(chainsize) / float(readsize)
719 readdensity = float(chainsize) / float(readsize)
720 else:
720 else:
721 readdensity = 1
721 readdensity = 1
722
722
723 fm.write('readsize largestblock readdensity srchunks',
723 fm.write('readsize largestblock readdensity srchunks',
724 ' %10d %10d %9.5f %8d',
724 ' %10d %10d %9.5f %8d',
725 readsize, largestblock, readdensity, srchunks,
725 readsize, largestblock, readdensity, srchunks,
726 readsize=readsize, largestblock=largestblock,
726 readsize=readsize, largestblock=largestblock,
727 readdensity=readdensity, srchunks=srchunks)
727 readdensity=readdensity, srchunks=srchunks)
728
728
729 fm.plain('\n')
729 fm.plain('\n')
730
730
731 fm.end()
731 fm.end()
732
732
733 @command('debugdirstate|debugstate',
733 @command('debugdirstate|debugstate',
734 [('', 'nodates', None, _('do not display the saved mtime')),
734 [('', 'nodates', None, _('do not display the saved mtime')),
735 ('', 'datesort', None, _('sort by saved mtime'))],
735 ('', 'datesort', None, _('sort by saved mtime'))],
736 _('[OPTION]...'))
736 _('[OPTION]...'))
737 def debugstate(ui, repo, **opts):
737 def debugstate(ui, repo, **opts):
738 """show the contents of the current dirstate"""
738 """show the contents of the current dirstate"""
739
739
740 nodates = opts.get(r'nodates')
740 nodates = opts.get(r'nodates')
741 datesort = opts.get(r'datesort')
741 datesort = opts.get(r'datesort')
742
742
743 timestr = ""
743 timestr = ""
744 if datesort:
744 if datesort:
745 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
745 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
746 else:
746 else:
747 keyfunc = None # sort by filename
747 keyfunc = None # sort by filename
748 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
748 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
749 if ent[3] == -1:
749 if ent[3] == -1:
750 timestr = 'unset '
750 timestr = 'unset '
751 elif nodates:
751 elif nodates:
752 timestr = 'set '
752 timestr = 'set '
753 else:
753 else:
754 timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
754 timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
755 time.localtime(ent[3]))
755 time.localtime(ent[3]))
756 timestr = encoding.strtolocal(timestr)
756 timestr = encoding.strtolocal(timestr)
757 if ent[1] & 0o20000:
757 if ent[1] & 0o20000:
758 mode = 'lnk'
758 mode = 'lnk'
759 else:
759 else:
760 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
760 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
761 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
761 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
762 for f in repo.dirstate.copies():
762 for f in repo.dirstate.copies():
763 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
763 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
764
764
765 @command('debugdiscovery',
765 @command('debugdiscovery',
766 [('', 'old', None, _('use old-style discovery')),
766 [('', 'old', None, _('use old-style discovery')),
767 ('', 'nonheads', None,
767 ('', 'nonheads', None,
768 _('use old-style discovery with non-heads included')),
768 _('use old-style discovery with non-heads included')),
769 ('', 'rev', [], 'restrict discovery to this set of revs'),
769 ('', 'rev', [], 'restrict discovery to this set of revs'),
770 ] + cmdutil.remoteopts,
770 ] + cmdutil.remoteopts,
771 _('[--rev REV] [OTHER]'))
771 _('[--rev REV] [OTHER]'))
772 def debugdiscovery(ui, repo, remoteurl="default", **opts):
772 def debugdiscovery(ui, repo, remoteurl="default", **opts):
773 """runs the changeset discovery protocol in isolation"""
773 """runs the changeset discovery protocol in isolation"""
774 opts = pycompat.byteskwargs(opts)
774 opts = pycompat.byteskwargs(opts)
775 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
775 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
776 remote = hg.peer(repo, opts, remoteurl)
776 remote = hg.peer(repo, opts, remoteurl)
777 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
777 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
778
778
779 # make sure tests are repeatable
779 # make sure tests are repeatable
780 random.seed(12323)
780 random.seed(12323)
781
781
782 def doit(pushedrevs, remoteheads, remote=remote):
782 def doit(pushedrevs, remoteheads, remote=remote):
783 if opts.get('old'):
783 if opts.get('old'):
784 if not util.safehasattr(remote, 'branches'):
784 if not util.safehasattr(remote, 'branches'):
785 # enable in-client legacy support
785 # enable in-client legacy support
786 remote = localrepo.locallegacypeer(remote.local())
786 remote = localrepo.locallegacypeer(remote.local())
787 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
787 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
788 force=True)
788 force=True)
789 common = set(common)
789 common = set(common)
790 if not opts.get('nonheads'):
790 if not opts.get('nonheads'):
791 ui.write(("unpruned common: %s\n") %
791 ui.write(("unpruned common: %s\n") %
792 " ".join(sorted(short(n) for n in common)))
792 " ".join(sorted(short(n) for n in common)))
793
793
794 clnode = repo.changelog.node
794 clnode = repo.changelog.node
795 common = repo.revs('heads(::%ln)', common)
795 common = repo.revs('heads(::%ln)', common)
796 common = {clnode(r) for r in common}
796 common = {clnode(r) for r in common}
797 else:
797 else:
798 nodes = None
798 nodes = None
799 if pushedrevs:
799 if pushedrevs:
800 revs = scmutil.revrange(repo, pushedrevs)
800 revs = scmutil.revrange(repo, pushedrevs)
801 nodes = [repo[r].node() for r in revs]
801 nodes = [repo[r].node() for r in revs]
802 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
802 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
803 ancestorsof=nodes)
803 ancestorsof=nodes)
804 common = set(common)
804 common = set(common)
805 rheads = set(hds)
805 rheads = set(hds)
806 lheads = set(repo.heads())
806 lheads = set(repo.heads())
807 ui.write(("common heads: %s\n") %
807 ui.write(("common heads: %s\n") %
808 " ".join(sorted(short(n) for n in common)))
808 " ".join(sorted(short(n) for n in common)))
809 if lheads <= common:
809 if lheads <= common:
810 ui.write(("local is subset\n"))
810 ui.write(("local is subset\n"))
811 elif rheads <= common:
811 elif rheads <= common:
812 ui.write(("remote is subset\n"))
812 ui.write(("remote is subset\n"))
813
813
814 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
814 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
815 localrevs = opts['rev']
815 localrevs = opts['rev']
816 doit(localrevs, remoterevs)
816 doit(localrevs, remoterevs)
817
817
818 _chunksize = 4 << 10
818 _chunksize = 4 << 10
819
819
820 @command('debugdownload',
820 @command('debugdownload',
821 [
821 [
822 ('o', 'output', '', _('path')),
822 ('o', 'output', '', _('path')),
823 ],
823 ],
824 optionalrepo=True)
824 optionalrepo=True)
825 def debugdownload(ui, repo, url, output=None, **opts):
825 def debugdownload(ui, repo, url, output=None, **opts):
826 """download a resource using Mercurial logic and config
826 """download a resource using Mercurial logic and config
827 """
827 """
828 fh = urlmod.open(ui, url, output)
828 fh = urlmod.open(ui, url, output)
829
829
830 dest = ui
830 dest = ui
831 if output:
831 if output:
832 dest = open(output, "wb", _chunksize)
832 dest = open(output, "wb", _chunksize)
833 try:
833 try:
834 data = fh.read(_chunksize)
834 data = fh.read(_chunksize)
835 while data:
835 while data:
836 dest.write(data)
836 dest.write(data)
837 data = fh.read(_chunksize)
837 data = fh.read(_chunksize)
838 finally:
838 finally:
839 if output:
839 if output:
840 dest.close()
840 dest.close()
841
841
842 @command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
842 @command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
843 def debugextensions(ui, repo, **opts):
843 def debugextensions(ui, repo, **opts):
844 '''show information about active extensions'''
844 '''show information about active extensions'''
845 opts = pycompat.byteskwargs(opts)
845 opts = pycompat.byteskwargs(opts)
846 exts = extensions.extensions(ui)
846 exts = extensions.extensions(ui)
847 hgver = util.version()
847 hgver = util.version()
848 fm = ui.formatter('debugextensions', opts)
848 fm = ui.formatter('debugextensions', opts)
849 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
849 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
850 isinternal = extensions.ismoduleinternal(extmod)
850 isinternal = extensions.ismoduleinternal(extmod)
851 extsource = pycompat.fsencode(extmod.__file__)
851 extsource = pycompat.fsencode(extmod.__file__)
852 if isinternal:
852 if isinternal:
853 exttestedwith = [] # never expose magic string to users
853 exttestedwith = [] # never expose magic string to users
854 else:
854 else:
855 exttestedwith = getattr(extmod, 'testedwith', '').split()
855 exttestedwith = getattr(extmod, 'testedwith', '').split()
856 extbuglink = getattr(extmod, 'buglink', None)
856 extbuglink = getattr(extmod, 'buglink', None)
857
857
858 fm.startitem()
858 fm.startitem()
859
859
860 if ui.quiet or ui.verbose:
860 if ui.quiet or ui.verbose:
861 fm.write('name', '%s\n', extname)
861 fm.write('name', '%s\n', extname)
862 else:
862 else:
863 fm.write('name', '%s', extname)
863 fm.write('name', '%s', extname)
864 if isinternal or hgver in exttestedwith:
864 if isinternal or hgver in exttestedwith:
865 fm.plain('\n')
865 fm.plain('\n')
866 elif not exttestedwith:
866 elif not exttestedwith:
867 fm.plain(_(' (untested!)\n'))
867 fm.plain(_(' (untested!)\n'))
868 else:
868 else:
869 lasttestedversion = exttestedwith[-1]
869 lasttestedversion = exttestedwith[-1]
870 fm.plain(' (%s!)\n' % lasttestedversion)
870 fm.plain(' (%s!)\n' % lasttestedversion)
871
871
872 fm.condwrite(ui.verbose and extsource, 'source',
872 fm.condwrite(ui.verbose and extsource, 'source',
873 _(' location: %s\n'), extsource or "")
873 _(' location: %s\n'), extsource or "")
874
874
875 if ui.verbose:
875 if ui.verbose:
876 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
876 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
877 fm.data(bundled=isinternal)
877 fm.data(bundled=isinternal)
878
878
879 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
879 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
880 _(' tested with: %s\n'),
880 _(' tested with: %s\n'),
881 fm.formatlist(exttestedwith, name='ver'))
881 fm.formatlist(exttestedwith, name='ver'))
882
882
883 fm.condwrite(ui.verbose and extbuglink, 'buglink',
883 fm.condwrite(ui.verbose and extbuglink, 'buglink',
884 _(' bug reporting: %s\n'), extbuglink or "")
884 _(' bug reporting: %s\n'), extbuglink or "")
885
885
886 fm.end()
886 fm.end()
887
887
888 @command('debugfileset',
888 @command('debugfileset',
889 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
889 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
890 ('', 'all-files', False,
890 ('', 'all-files', False,
891 _('test files from all revisions and working directory')),
891 _('test files from all revisions and working directory')),
892 ('s', 'show-matcher', None,
892 ('s', 'show-matcher', None,
893 _('print internal representation of matcher')),
893 _('print internal representation of matcher')),
894 ('p', 'show-stage', [],
894 ('p', 'show-stage', [],
895 _('print parsed tree at the given stage'), _('NAME'))],
895 _('print parsed tree at the given stage'), _('NAME'))],
896 _('[-r REV] [--all-files] [OPTION]... FILESPEC'))
896 _('[-r REV] [--all-files] [OPTION]... FILESPEC'))
897 def debugfileset(ui, repo, expr, **opts):
897 def debugfileset(ui, repo, expr, **opts):
898 '''parse and apply a fileset specification'''
898 '''parse and apply a fileset specification'''
899 from . import fileset
899 from . import fileset
900 fileset.symbols # force import of fileset so we have predicates to optimize
900 fileset.symbols # force import of fileset so we have predicates to optimize
901 opts = pycompat.byteskwargs(opts)
901 opts = pycompat.byteskwargs(opts)
902 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
902 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
903
903
904 stages = [
904 stages = [
905 ('parsed', pycompat.identity),
905 ('parsed', pycompat.identity),
906 ('analyzed', filesetlang.analyze),
906 ('analyzed', filesetlang.analyze),
907 ('optimized', filesetlang.optimize),
907 ('optimized', filesetlang.optimize),
908 ]
908 ]
909 stagenames = set(n for n, f in stages)
909 stagenames = set(n for n, f in stages)
910
910
911 showalways = set()
911 showalways = set()
912 if ui.verbose and not opts['show_stage']:
912 if ui.verbose and not opts['show_stage']:
913 # show parsed tree by --verbose (deprecated)
913 # show parsed tree by --verbose (deprecated)
914 showalways.add('parsed')
914 showalways.add('parsed')
915 if opts['show_stage'] == ['all']:
915 if opts['show_stage'] == ['all']:
916 showalways.update(stagenames)
916 showalways.update(stagenames)
917 else:
917 else:
918 for n in opts['show_stage']:
918 for n in opts['show_stage']:
919 if n not in stagenames:
919 if n not in stagenames:
920 raise error.Abort(_('invalid stage name: %s') % n)
920 raise error.Abort(_('invalid stage name: %s') % n)
921 showalways.update(opts['show_stage'])
921 showalways.update(opts['show_stage'])
922
922
923 tree = filesetlang.parse(expr)
923 tree = filesetlang.parse(expr)
924 for n, f in stages:
924 for n, f in stages:
925 tree = f(tree)
925 tree = f(tree)
926 if n in showalways:
926 if n in showalways:
927 if opts['show_stage'] or n != 'parsed':
927 if opts['show_stage'] or n != 'parsed':
928 ui.write(("* %s:\n") % n)
928 ui.write(("* %s:\n") % n)
929 ui.write(filesetlang.prettyformat(tree), "\n")
929 ui.write(filesetlang.prettyformat(tree), "\n")
930
930
931 files = set()
931 files = set()
932 if opts['all_files']:
932 if opts['all_files']:
933 for r in repo:
933 for r in repo:
934 c = repo[r]
934 c = repo[r]
935 files.update(c.files())
935 files.update(c.files())
936 files.update(c.substate)
936 files.update(c.substate)
937 if opts['all_files'] or ctx.rev() is None:
937 if opts['all_files'] or ctx.rev() is None:
938 wctx = repo[None]
938 wctx = repo[None]
939 files.update(repo.dirstate.walk(scmutil.matchall(repo),
939 files.update(repo.dirstate.walk(scmutil.matchall(repo),
940 subrepos=list(wctx.substate),
940 subrepos=list(wctx.substate),
941 unknown=True, ignored=True))
941 unknown=True, ignored=True))
942 files.update(wctx.substate)
942 files.update(wctx.substate)
943 else:
943 else:
944 files.update(ctx.files())
944 files.update(ctx.files())
945 files.update(ctx.substate)
945 files.update(ctx.substate)
946
946
947 m = ctx.matchfileset(expr)
947 m = ctx.matchfileset(expr)
948 if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
948 if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
949 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
949 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
950 for f in sorted(files):
950 for f in sorted(files):
951 if not m(f):
951 if not m(f):
952 continue
952 continue
953 ui.write("%s\n" % f)
953 ui.write("%s\n" % f)
954
954
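# Editorial note: the following helper is an illustrative sketch, not part
# of debugcommands.py. It shows the staged pipeline used by debugfileset
# above -- parse the expression once, then push the tree through
# filesetlang.analyze and filesetlang.optimize, optionally pretty-printing
# it after each named stage. The helper name is hypothetical.
def _showfilesetstages(ui, expr, shownames):
    stages = [('parsed', pycompat.identity),
              ('analyzed', filesetlang.analyze),
              ('optimized', filesetlang.optimize)]
    tree = filesetlang.parse(expr)
    for name, fn in stages:
        tree = fn(tree)
        if name in shownames:
            ui.write(("* %s:\n") % name)
            ui.write(filesetlang.prettyformat(tree), "\n")
    return tree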
955 @command('debugformat',
955 @command('debugformat',
956 [] + cmdutil.formatteropts)
956 [] + cmdutil.formatteropts)
957 def debugformat(ui, repo, **opts):
957 def debugformat(ui, repo, **opts):
958 """display format information about the current repository
958 """display format information about the current repository
959
959
960 Use --verbose to get extra information about the current config value and
960 Use --verbose to get extra information about the current config value and
961 the Mercurial default."""
961 the Mercurial default."""
962 opts = pycompat.byteskwargs(opts)
962 opts = pycompat.byteskwargs(opts)
963 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
963 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
964 maxvariantlength = max(len('format-variant'), maxvariantlength)
964 maxvariantlength = max(len('format-variant'), maxvariantlength)
965
965
966 def makeformatname(name):
966 def makeformatname(name):
967 return '%s:' + (' ' * (maxvariantlength - len(name)))
967 return '%s:' + (' ' * (maxvariantlength - len(name)))
968
968
969 fm = ui.formatter('debugformat', opts)
969 fm = ui.formatter('debugformat', opts)
970 if fm.isplain():
970 if fm.isplain():
971 def formatvalue(value):
971 def formatvalue(value):
972 if util.safehasattr(value, 'startswith'):
972 if util.safehasattr(value, 'startswith'):
973 return value
973 return value
974 if value:
974 if value:
975 return 'yes'
975 return 'yes'
976 else:
976 else:
977 return 'no'
977 return 'no'
978 else:
978 else:
979 formatvalue = pycompat.identity
979 formatvalue = pycompat.identity
980
980
981 fm.plain('format-variant')
981 fm.plain('format-variant')
982 fm.plain(' ' * (maxvariantlength - len('format-variant')))
982 fm.plain(' ' * (maxvariantlength - len('format-variant')))
983 fm.plain(' repo')
983 fm.plain(' repo')
984 if ui.verbose:
984 if ui.verbose:
985 fm.plain(' config default')
985 fm.plain(' config default')
986 fm.plain('\n')
986 fm.plain('\n')
987 for fv in upgrade.allformatvariant:
987 for fv in upgrade.allformatvariant:
988 fm.startitem()
988 fm.startitem()
989 repovalue = fv.fromrepo(repo)
989 repovalue = fv.fromrepo(repo)
990 configvalue = fv.fromconfig(repo)
990 configvalue = fv.fromconfig(repo)
991
991
992 if repovalue != configvalue:
992 if repovalue != configvalue:
993 namelabel = 'formatvariant.name.mismatchconfig'
993 namelabel = 'formatvariant.name.mismatchconfig'
994 repolabel = 'formatvariant.repo.mismatchconfig'
994 repolabel = 'formatvariant.repo.mismatchconfig'
995 elif repovalue != fv.default:
995 elif repovalue != fv.default:
996 namelabel = 'formatvariant.name.mismatchdefault'
996 namelabel = 'formatvariant.name.mismatchdefault'
997 repolabel = 'formatvariant.repo.mismatchdefault'
997 repolabel = 'formatvariant.repo.mismatchdefault'
998 else:
998 else:
999 namelabel = 'formatvariant.name.uptodate'
999 namelabel = 'formatvariant.name.uptodate'
1000 repolabel = 'formatvariant.repo.uptodate'
1000 repolabel = 'formatvariant.repo.uptodate'
1001
1001
1002 fm.write('name', makeformatname(fv.name), fv.name,
1002 fm.write('name', makeformatname(fv.name), fv.name,
1003 label=namelabel)
1003 label=namelabel)
1004 fm.write('repo', ' %3s', formatvalue(repovalue),
1004 fm.write('repo', ' %3s', formatvalue(repovalue),
1005 label=repolabel)
1005 label=repolabel)
1006 if fv.default != configvalue:
1006 if fv.default != configvalue:
1007 configlabel = 'formatvariant.config.special'
1007 configlabel = 'formatvariant.config.special'
1008 else:
1008 else:
1009 configlabel = 'formatvariant.config.default'
1009 configlabel = 'formatvariant.config.default'
1010 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
1010 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
1011 label=configlabel)
1011 label=configlabel)
1012 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
1012 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
1013 label='formatvariant.default')
1013 label='formatvariant.default')
1014 fm.plain('\n')
1014 fm.plain('\n')
1015 fm.end()
1015 fm.end()
1016
1016
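# Editorial note: an illustrative sketch, not part of debugcommands.py.
# Each row printed by debugformat is labelled according to whether the
# repository value diverges from the effective config (mismatchconfig),
# only from Mercurial's built-in default (mismatchdefault), or neither
# (uptodate). The helper name below is hypothetical.
def _formatvariantlabels(repovalue, configvalue, default):
    if repovalue != configvalue:
        suffix = 'mismatchconfig'
    elif repovalue != default:
        suffix = 'mismatchdefault'
    else:
        suffix = 'uptodate'
    return ('formatvariant.name.' + suffix,
            'formatvariant.repo.' + suffix)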
1017 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
1017 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
1018 def debugfsinfo(ui, path="."):
1018 def debugfsinfo(ui, path="."):
1019 """show information detected about current filesystem"""
1019 """show information detected about current filesystem"""
1020 ui.write(('path: %s\n') % path)
1020 ui.write(('path: %s\n') % path)
1021 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
1021 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
1022 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
1022 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
1023 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
1023 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
1024 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
1024 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
1025 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
1025 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
1026 casesensitive = '(unknown)'
1026 casesensitive = '(unknown)'
1027 try:
1027 try:
1028 with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
1028 with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
1029 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
1029 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
1030 except OSError:
1030 except OSError:
1031 pass
1031 pass
1032 ui.write(('case-sensitive: %s\n') % casesensitive)
1032 ui.write(('case-sensitive: %s\n') % casesensitive)
1033
1033
1034 @command('debuggetbundle',
1034 @command('debuggetbundle',
1035 [('H', 'head', [], _('id of head node'), _('ID')),
1035 [('H', 'head', [], _('id of head node'), _('ID')),
1036 ('C', 'common', [], _('id of common node'), _('ID')),
1036 ('C', 'common', [], _('id of common node'), _('ID')),
1037 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
1037 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
1038 _('REPO FILE [-H|-C ID]...'),
1038 _('REPO FILE [-H|-C ID]...'),
1039 norepo=True)
1039 norepo=True)
1040 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1040 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1041 """retrieves a bundle from a repo
1041 """retrieves a bundle from a repo
1042
1042
1043 Every ID must be a full-length hex node id string. Saves the bundle to the
1043 Every ID must be a full-length hex node id string. Saves the bundle to the
1044 given file.
1044 given file.
1045 """
1045 """
1046 opts = pycompat.byteskwargs(opts)
1046 opts = pycompat.byteskwargs(opts)
1047 repo = hg.peer(ui, opts, repopath)
1047 repo = hg.peer(ui, opts, repopath)
1048 if not repo.capable('getbundle'):
1048 if not repo.capable('getbundle'):
1049 raise error.Abort("getbundle() not supported by target repository")
1049 raise error.Abort("getbundle() not supported by target repository")
1050 args = {}
1050 args = {}
1051 if common:
1051 if common:
1052 args[r'common'] = [bin(s) for s in common]
1052 args[r'common'] = [bin(s) for s in common]
1053 if head:
1053 if head:
1054 args[r'heads'] = [bin(s) for s in head]
1054 args[r'heads'] = [bin(s) for s in head]
1055 # TODO: get desired bundlecaps from command line.
1055 # TODO: get desired bundlecaps from command line.
1056 args[r'bundlecaps'] = None
1056 args[r'bundlecaps'] = None
1057 bundle = repo.getbundle('debug', **args)
1057 bundle = repo.getbundle('debug', **args)
1058
1058
1059 bundletype = opts.get('type', 'bzip2').lower()
1059 bundletype = opts.get('type', 'bzip2').lower()
1060 btypes = {'none': 'HG10UN',
1060 btypes = {'none': 'HG10UN',
1061 'bzip2': 'HG10BZ',
1061 'bzip2': 'HG10BZ',
1062 'gzip': 'HG10GZ',
1062 'gzip': 'HG10GZ',
1063 'bundle2': 'HG20'}
1063 'bundle2': 'HG20'}
1064 bundletype = btypes.get(bundletype)
1064 bundletype = btypes.get(bundletype)
1065 if bundletype not in bundle2.bundletypes:
1065 if bundletype not in bundle2.bundletypes:
1066 raise error.Abort(_('unknown bundle type specified with --type'))
1066 raise error.Abort(_('unknown bundle type specified with --type'))
1067 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1067 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1068
1068
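# Editorial note: an illustrative sketch, not part of debugcommands.py.
# debuggetbundle maps the user-facing --type value to an internal bundle
# identifier and rejects anything bundle2 does not know about; factored
# out, the lookup looks like this (hypothetical helper name):
def _resolvebundletype(spec):
    btypes = {'none': 'HG10UN',
              'bzip2': 'HG10BZ',
              'gzip': 'HG10GZ',
              'bundle2': 'HG20'}
    bundletype = btypes.get(spec.lower())
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_('unknown bundle type specified with --type'))
    return bundletype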
1069 @command('debugignore', [], '[FILE]')
1069 @command('debugignore', [], '[FILE]')
1070 def debugignore(ui, repo, *files, **opts):
1070 def debugignore(ui, repo, *files, **opts):
1071 """display the combined ignore pattern and information about ignored files
1071 """display the combined ignore pattern and information about ignored files
1072
1072
1073 With no arguments, display the combined ignore pattern.
1073 With no arguments, display the combined ignore pattern.
1074
1074
1075 Given space-separated file names, show whether each file is ignored and,
1075 Given space-separated file names, show whether each file is ignored and,
1076 if so, the ignore rule (file and line number) that matched it.
1076 if so, the ignore rule (file and line number) that matched it.
1077 """
1077 """
1078 ignore = repo.dirstate._ignore
1078 ignore = repo.dirstate._ignore
1079 if not files:
1079 if not files:
1080 # Show all the patterns
1080 # Show all the patterns
1081 ui.write("%s\n" % pycompat.byterepr(ignore))
1081 ui.write("%s\n" % pycompat.byterepr(ignore))
1082 else:
1082 else:
1083 m = scmutil.match(repo[None], pats=files)
1083 m = scmutil.match(repo[None], pats=files)
1084 for f in m.files():
1084 for f in m.files():
1085 nf = util.normpath(f)
1085 nf = util.normpath(f)
1086 ignored = None
1086 ignored = None
1087 ignoredata = None
1087 ignoredata = None
1088 if nf != '.':
1088 if nf != '.':
1089 if ignore(nf):
1089 if ignore(nf):
1090 ignored = nf
1090 ignored = nf
1091 ignoredata = repo.dirstate._ignorefileandline(nf)
1091 ignoredata = repo.dirstate._ignorefileandline(nf)
1092 else:
1092 else:
1093 for p in util.finddirs(nf):
1093 for p in util.finddirs(nf):
1094 if ignore(p):
1094 if ignore(p):
1095 ignored = p
1095 ignored = p
1096 ignoredata = repo.dirstate._ignorefileandline(p)
1096 ignoredata = repo.dirstate._ignorefileandline(p)
1097 break
1097 break
1098 if ignored:
1098 if ignored:
1099 if ignored == nf:
1099 if ignored == nf:
1100 ui.write(_("%s is ignored\n") % m.uipath(f))
1100 ui.write(_("%s is ignored\n") % m.uipath(f))
1101 else:
1101 else:
1102 ui.write(_("%s is ignored because of "
1102 ui.write(_("%s is ignored because of "
1103 "containing folder %s\n")
1103 "containing folder %s\n")
1104 % (m.uipath(f), ignored))
1104 % (m.uipath(f), ignored))
1105 ignorefile, lineno, line = ignoredata
1105 ignorefile, lineno, line = ignoredata
1106 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1106 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1107 % (ignorefile, lineno, line))
1107 % (ignorefile, lineno, line))
1108 else:
1108 else:
1109 ui.write(_("%s is not ignored\n") % m.uipath(f))
1109 ui.write(_("%s is not ignored\n") % m.uipath(f))
1110
1110
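# Editorial note: an illustrative sketch, not part of debugcommands.py.
# debugignore first tests the file itself against the combined ignore
# matcher and then walks its parent directories, so a file can be reported
# as ignored because a containing folder matched. The hypothetical helper
# below condenses that walk (omitting the special case for '.');
# ``ignore`` and ``ignorefileandline`` stand in for repo.dirstate._ignore
# and repo.dirstate._ignorefileandline.
def _findignorerule(ignore, ignorefileandline, nf):
    if ignore(nf):
        return nf, ignorefileandline(nf)
    for p in util.finddirs(nf):
        if ignore(p):
            return p, ignorefileandline(p)
    return None, None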
1111 @command('debugindex', cmdutil.debugrevlogopts +
1111 @command('debugindex', cmdutil.debugrevlogopts +
1112 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
1112 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
1113 _('[-f FORMAT] -c|-m|FILE'),
1113 _('[-f FORMAT] -c|-m|FILE'),
1114 optionalrepo=True)
1114 optionalrepo=True)
1115 def debugindex(ui, repo, file_=None, **opts):
1115 def debugindex(ui, repo, file_=None, **opts):
1116 """dump the contents of an index file"""
1116 """dump the contents of an index file"""
1117 opts = pycompat.byteskwargs(opts)
1117 opts = pycompat.byteskwargs(opts)
1118 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
1118 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
1119 format = opts.get('format', 0)
1119 format = opts.get('format', 0)
1120 if format not in (0, 1):
1120 if format not in (0, 1):
1121 raise error.Abort(_("unknown format %d") % format)
1121 raise error.Abort(_("unknown format %d") % format)
1122
1122
1123 if ui.debugflag:
1123 if ui.debugflag:
1124 shortfn = hex
1124 shortfn = hex
1125 else:
1125 else:
1126 shortfn = short
1126 shortfn = short
1127
1127
1128 # There might not be anything in r, so have a sane default
1128 # There might not be anything in r, so have a sane default
1129 idlen = 12
1129 idlen = 12
1130 for i in r:
1130 for i in r:
1131 idlen = len(shortfn(r.node(i)))
1131 idlen = len(shortfn(r.node(i)))
1132 break
1132 break
1133
1133
1134 if format == 0:
1134 if format == 0:
1135 if ui.verbose:
1135 if ui.verbose:
1136 ui.write((" rev offset length linkrev"
1136 ui.write((" rev offset length linkrev"
1137 " %s %s p2\n") % ("nodeid".ljust(idlen),
1137 " %s %s p2\n") % ("nodeid".ljust(idlen),
1138 "p1".ljust(idlen)))
1138 "p1".ljust(idlen)))
1139 else:
1139 else:
1140 ui.write((" rev linkrev %s %s p2\n") % (
1140 ui.write((" rev linkrev %s %s p2\n") % (
1141 "nodeid".ljust(idlen), "p1".ljust(idlen)))
1141 "nodeid".ljust(idlen), "p1".ljust(idlen)))
1142 elif format == 1:
1142 elif format == 1:
1143 if ui.verbose:
1143 if ui.verbose:
1144 ui.write((" rev flag offset length size link p1"
1144 ui.write((" rev flag offset length size link p1"
1145 " p2 %s\n") % "nodeid".rjust(idlen))
1145 " p2 %s\n") % "nodeid".rjust(idlen))
1146 else:
1146 else:
1147 ui.write((" rev flag size link p1 p2 %s\n") %
1147 ui.write((" rev flag size link p1 p2 %s\n") %
1148 "nodeid".rjust(idlen))
1148 "nodeid".rjust(idlen))
1149
1149
1150 for i in r:
1150 for i in r:
1151 node = r.node(i)
1151 node = r.node(i)
1152 if format == 0:
1152 if format == 0:
1153 try:
1153 try:
1154 pp = r.parents(node)
1154 pp = r.parents(node)
1155 except Exception:
1155 except Exception:
1156 pp = [nullid, nullid]
1156 pp = [nullid, nullid]
1157 if ui.verbose:
1157 if ui.verbose:
1158 ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
1158 ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
1159 i, r.start(i), r.length(i), r.linkrev(i),
1159 i, r.start(i), r.length(i), r.linkrev(i),
1160 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
1160 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
1161 else:
1161 else:
1162 ui.write("% 6d % 7d %s %s %s\n" % (
1162 ui.write("% 6d % 7d %s %s %s\n" % (
1163 i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
1163 i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
1164 shortfn(pp[1])))
1164 shortfn(pp[1])))
1165 elif format == 1:
1165 elif format == 1:
1166 pr = r.parentrevs(i)
1166 pr = r.parentrevs(i)
1167 if ui.verbose:
1167 if ui.verbose:
1168 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
1168 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
1169 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
1169 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
1170 r.linkrev(i), pr[0], pr[1], shortfn(node)))
1170 r.linkrev(i), pr[0], pr[1], shortfn(node)))
1171 else:
1171 else:
1172 ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
1172 ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
1173 i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
1173 i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
1174 shortfn(node)))
1174 shortfn(node)))
1175
1175
1176 @command('debugindexdot', cmdutil.debugrevlogopts,
1176 @command('debugindexdot', cmdutil.debugrevlogopts,
1177 _('-c|-m|FILE'), optionalrepo=True)
1177 _('-c|-m|FILE'), optionalrepo=True)
1178 def debugindexdot(ui, repo, file_=None, **opts):
1178 def debugindexdot(ui, repo, file_=None, **opts):
1179 """dump an index DAG as a graphviz dot file"""
1179 """dump an index DAG as a graphviz dot file"""
1180 opts = pycompat.byteskwargs(opts)
1180 opts = pycompat.byteskwargs(opts)
1181 r = cmdutil.openstorage(repo, 'debugindexdot', file_, opts)
1181 r = cmdutil.openstorage(repo, 'debugindexdot', file_, opts)
1182 ui.write(("digraph G {\n"))
1182 ui.write(("digraph G {\n"))
1183 for i in r:
1183 for i in r:
1184 node = r.node(i)
1184 node = r.node(i)
1185 pp = r.parents(node)
1185 pp = r.parents(node)
1186 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1186 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1187 if pp[1] != nullid:
1187 if pp[1] != nullid:
1188 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1188 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1189 ui.write("}\n")
1189 ui.write("}\n")
1190
1190
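# Editorial note: an illustrative sketch, not part of debugcommands.py.
# debugindexdot emits one "parent -> child" edge per stored parent and
# skips the null second parent; written as a generator of edge tuples
# (hypothetical helper name), the traversal is:
def _dagedges(r):
    for i in r:
        pp = r.parents(r.node(i))
        yield r.rev(pp[0]), i
        if pp[1] != nullid:
            yield r.rev(pp[1]), i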
1191 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1191 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1192 def debuginstall(ui, **opts):
1192 def debuginstall(ui, **opts):
1193 '''test Mercurial installation
1193 '''test Mercurial installation
1194
1194
1195 Returns 0 on success.
1195 Returns 0 on success.
1196 '''
1196 '''
1197 opts = pycompat.byteskwargs(opts)
1197 opts = pycompat.byteskwargs(opts)
1198
1198
1199 def writetemp(contents):
1199 def writetemp(contents):
1200 (fd, name) = pycompat.mkstemp(prefix="hg-debuginstall-")
1200 (fd, name) = pycompat.mkstemp(prefix="hg-debuginstall-")
1201 f = os.fdopen(fd, r"wb")
1201 f = os.fdopen(fd, r"wb")
1202 f.write(contents)
1202 f.write(contents)
1203 f.close()
1203 f.close()
1204 return name
1204 return name
1205
1205
1206 problems = 0
1206 problems = 0
1207
1207
1208 fm = ui.formatter('debuginstall', opts)
1208 fm = ui.formatter('debuginstall', opts)
1209 fm.startitem()
1209 fm.startitem()
1210
1210
1211 # encoding
1211 # encoding
1212 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1212 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1213 err = None
1213 err = None
1214 try:
1214 try:
1215 codecs.lookup(pycompat.sysstr(encoding.encoding))
1215 codecs.lookup(pycompat.sysstr(encoding.encoding))
1216 except LookupError as inst:
1216 except LookupError as inst:
1217 err = stringutil.forcebytestr(inst)
1217 err = stringutil.forcebytestr(inst)
1218 problems += 1
1218 problems += 1
1219 fm.condwrite(err, 'encodingerror', _(" %s\n"
1219 fm.condwrite(err, 'encodingerror', _(" %s\n"
1220 " (check that your locale is properly set)\n"), err)
1220 " (check that your locale is properly set)\n"), err)
1221
1221
1222 # Python
1222 # Python
1223 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1223 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1224 pycompat.sysexecutable)
1224 pycompat.sysexecutable)
1225 fm.write('pythonver', _("checking Python version (%s)\n"),
1225 fm.write('pythonver', _("checking Python version (%s)\n"),
1226 ("%d.%d.%d" % sys.version_info[:3]))
1226 ("%d.%d.%d" % sys.version_info[:3]))
1227 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1227 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1228 os.path.dirname(pycompat.fsencode(os.__file__)))
1228 os.path.dirname(pycompat.fsencode(os.__file__)))
1229
1229
1230 security = set(sslutil.supportedprotocols)
1230 security = set(sslutil.supportedprotocols)
1231 if sslutil.hassni:
1231 if sslutil.hassni:
1232 security.add('sni')
1232 security.add('sni')
1233
1233
1234 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1234 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1235 fm.formatlist(sorted(security), name='protocol',
1235 fm.formatlist(sorted(security), name='protocol',
1236 fmt='%s', sep=','))
1236 fmt='%s', sep=','))
1237
1237
1238 # These are warnings, not errors. So don't increment problem count. This
1238 # These are warnings, not errors. So don't increment problem count. This
1239 # may change in the future.
1239 # may change in the future.
1240 if 'tls1.2' not in security:
1240 if 'tls1.2' not in security:
1241 fm.plain(_(' TLS 1.2 not supported by Python install; '
1241 fm.plain(_(' TLS 1.2 not supported by Python install; '
1242 'network connections lack modern security\n'))
1242 'network connections lack modern security\n'))
1243 if 'sni' not in security:
1243 if 'sni' not in security:
1244 fm.plain(_(' SNI not supported by Python install; may have '
1244 fm.plain(_(' SNI not supported by Python install; may have '
1245 'connectivity issues with some servers\n'))
1245 'connectivity issues with some servers\n'))
1246
1246
1247 # TODO print CA cert info
1247 # TODO print CA cert info
1248
1248
1249 # hg version
1249 # hg version
1250 hgver = util.version()
1250 hgver = util.version()
1251 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1251 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1252 hgver.split('+')[0])
1252 hgver.split('+')[0])
1253 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1253 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1254 '+'.join(hgver.split('+')[1:]))
1254 '+'.join(hgver.split('+')[1:]))
1255
1255
1256 # compiled modules
1256 # compiled modules
1257 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1257 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1258 policy.policy)
1258 policy.policy)
1259 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1259 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1260 os.path.dirname(pycompat.fsencode(__file__)))
1260 os.path.dirname(pycompat.fsencode(__file__)))
1261
1261
1262 if policy.policy in ('c', 'allow'):
1262 if policy.policy in ('c', 'allow'):
1263 err = None
1263 err = None
1264 try:
1264 try:
1265 from .cext import (
1265 from .cext import (
1266 base85,
1266 base85,
1267 bdiff,
1267 bdiff,
1268 mpatch,
1268 mpatch,
1269 osutil,
1269 osutil,
1270 )
1270 )
1271 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1271 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1272 except Exception as inst:
1272 except Exception as inst:
1273 err = stringutil.forcebytestr(inst)
1273 err = stringutil.forcebytestr(inst)
1274 problems += 1
1274 problems += 1
1275 fm.condwrite(err, 'extensionserror', " %s\n", err)
1275 fm.condwrite(err, 'extensionserror', " %s\n", err)
1276
1276
1277 compengines = util.compengines._engines.values()
1277 compengines = util.compengines._engines.values()
1278 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1278 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1279 fm.formatlist(sorted(e.name() for e in compengines),
1279 fm.formatlist(sorted(e.name() for e in compengines),
1280 name='compengine', fmt='%s', sep=', '))
1280 name='compengine', fmt='%s', sep=', '))
1281 fm.write('compenginesavail', _('checking available compression engines '
1281 fm.write('compenginesavail', _('checking available compression engines '
1282 '(%s)\n'),
1282 '(%s)\n'),
1283 fm.formatlist(sorted(e.name() for e in compengines
1283 fm.formatlist(sorted(e.name() for e in compengines
1284 if e.available()),
1284 if e.available()),
1285 name='compengine', fmt='%s', sep=', '))
1285 name='compengine', fmt='%s', sep=', '))
1286 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1286 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1287 fm.write('compenginesserver', _('checking available compression engines '
1287 fm.write('compenginesserver', _('checking available compression engines '
1288 'for wire protocol (%s)\n'),
1288 'for wire protocol (%s)\n'),
1289 fm.formatlist([e.name() for e in wirecompengines
1289 fm.formatlist([e.name() for e in wirecompengines
1290 if e.wireprotosupport()],
1290 if e.wireprotosupport()],
1291 name='compengine', fmt='%s', sep=', '))
1291 name='compengine', fmt='%s', sep=', '))
1292 re2 = 'missing'
1292 re2 = 'missing'
1293 if util._re2:
1293 if util._re2:
1294 re2 = 'available'
1294 re2 = 'available'
1295 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1295 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1296 fm.data(re2=bool(util._re2))
1296 fm.data(re2=bool(util._re2))
1297
1297
1298 # templates
1298 # templates
1299 p = templater.templatepaths()
1299 p = templater.templatepaths()
1300 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1300 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1301 fm.condwrite(not p, '', _(" no template directories found\n"))
1301 fm.condwrite(not p, '', _(" no template directories found\n"))
1302 if p:
1302 if p:
1303 m = templater.templatepath("map-cmdline.default")
1303 m = templater.templatepath("map-cmdline.default")
1304 if m:
1304 if m:
1305 # template found, check if it is working
1305 # template found, check if it is working
1306 err = None
1306 err = None
1307 try:
1307 try:
1308 templater.templater.frommapfile(m)
1308 templater.templater.frommapfile(m)
1309 except Exception as inst:
1309 except Exception as inst:
1310 err = stringutil.forcebytestr(inst)
1310 err = stringutil.forcebytestr(inst)
1311 p = None
1311 p = None
1312 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1312 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1313 else:
1313 else:
1314 p = None
1314 p = None
1315 fm.condwrite(p, 'defaulttemplate',
1315 fm.condwrite(p, 'defaulttemplate',
1316 _("checking default template (%s)\n"), m)
1316 _("checking default template (%s)\n"), m)
1317 fm.condwrite(not m, 'defaulttemplatenotfound',
1317 fm.condwrite(not m, 'defaulttemplatenotfound',
1318 _(" template '%s' not found\n"), "default")
1318 _(" template '%s' not found\n"), "default")
1319 if not p:
1319 if not p:
1320 problems += 1
1320 problems += 1
1321 fm.condwrite(not p, '',
1321 fm.condwrite(not p, '',
1322 _(" (templates seem to have been installed incorrectly)\n"))
1322 _(" (templates seem to have been installed incorrectly)\n"))
1323
1323
1324 # editor
1324 # editor
1325 editor = ui.geteditor()
1325 editor = ui.geteditor()
1326 editor = util.expandpath(editor)
1326 editor = util.expandpath(editor)
1327 editorbin = procutil.shellsplit(editor)[0]
1327 editorbin = procutil.shellsplit(editor)[0]
1328 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1328 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1329 cmdpath = procutil.findexe(editorbin)
1329 cmdpath = procutil.findexe(editorbin)
1330 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1330 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1331 _(" No commit editor set and can't find %s in PATH\n"
1331 _(" No commit editor set and can't find %s in PATH\n"
1332 " (specify a commit editor in your configuration"
1332 " (specify a commit editor in your configuration"
1333 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1333 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1334 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1334 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1335 _(" Can't find editor '%s' in PATH\n"
1335 _(" Can't find editor '%s' in PATH\n"
1336 " (specify a commit editor in your configuration"
1336 " (specify a commit editor in your configuration"
1337 " file)\n"), not cmdpath and editorbin)
1337 " file)\n"), not cmdpath and editorbin)
1338 if not cmdpath and editor != 'vi':
1338 if not cmdpath and editor != 'vi':
1339 problems += 1
1339 problems += 1
1340
1340
1341 # check username
1341 # check username
1342 username = None
1342 username = None
1343 err = None
1343 err = None
1344 try:
1344 try:
1345 username = ui.username()
1345 username = ui.username()
1346 except error.Abort as e:
1346 except error.Abort as e:
1347 err = stringutil.forcebytestr(e)
1347 err = stringutil.forcebytestr(e)
1348 problems += 1
1348 problems += 1
1349
1349
1350 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1350 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1351 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1351 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1352 " (specify a username in your configuration file)\n"), err)
1352 " (specify a username in your configuration file)\n"), err)
1353
1353
1354 fm.condwrite(not problems, '',
1354 fm.condwrite(not problems, '',
1355 _("no problems detected\n"))
1355 _("no problems detected\n"))
1356 if not problems:
1356 if not problems:
1357 fm.data(problems=problems)
1357 fm.data(problems=problems)
1358 fm.condwrite(problems, 'problems',
1358 fm.condwrite(problems, 'problems',
1359 _("%d problems detected,"
1359 _("%d problems detected,"
1360 " please check your install!\n"), problems)
1360 " please check your install!\n"), problems)
1361 fm.end()
1361 fm.end()
1362
1362
1363 return problems
1363 return problems
1364
1364
1365 @command('debugknown', [], _('REPO ID...'), norepo=True)
1365 @command('debugknown', [], _('REPO ID...'), norepo=True)
1366 def debugknown(ui, repopath, *ids, **opts):
1366 def debugknown(ui, repopath, *ids, **opts):
1367 """test whether node ids are known to a repo
1367 """test whether node ids are known to a repo
1368
1368
1369 Every ID must be a full-length hex node id string. Returns a list of 0s
1369 Every ID must be a full-length hex node id string. Returns a list of 0s
1370 and 1s indicating unknown/known.
1370 and 1s indicating unknown/known.
1371 """
1371 """
1372 opts = pycompat.byteskwargs(opts)
1372 opts = pycompat.byteskwargs(opts)
1373 repo = hg.peer(ui, opts, repopath)
1373 repo = hg.peer(ui, opts, repopath)
1374 if not repo.capable('known'):
1374 if not repo.capable('known'):
1375 raise error.Abort("known() not supported by target repository")
1375 raise error.Abort("known() not supported by target repository")
1376 flags = repo.known([bin(s) for s in ids])
1376 flags = repo.known([bin(s) for s in ids])
1377 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1377 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1378
1378
1379 @command('debuglabelcomplete', [], _('LABEL...'))
1379 @command('debuglabelcomplete', [], _('LABEL...'))
1380 def debuglabelcomplete(ui, repo, *args):
1380 def debuglabelcomplete(ui, repo, *args):
1381 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1381 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1382 debugnamecomplete(ui, repo, *args)
1382 debugnamecomplete(ui, repo, *args)
1383
1383
1384 @command('debuglocks',
1384 @command('debuglocks',
1385 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1385 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1386 ('W', 'force-wlock', None,
1386 ('W', 'force-wlock', None,
1387 _('free the working state lock (DANGEROUS)')),
1387 _('free the working state lock (DANGEROUS)')),
1388 ('s', 'set-lock', None, _('set the store lock until stopped')),
1388 ('s', 'set-lock', None, _('set the store lock until stopped')),
1389 ('S', 'set-wlock', None,
1389 ('S', 'set-wlock', None,
1390 _('set the working state lock until stopped'))],
1390 _('set the working state lock until stopped'))],
1391 _('[OPTION]...'))
1391 _('[OPTION]...'))
1392 def debuglocks(ui, repo, **opts):
1392 def debuglocks(ui, repo, **opts):
1393 """show or modify state of locks
1393 """show or modify state of locks
1394
1394
1395 By default, this command will show which locks are held. This
1395 By default, this command will show which locks are held. This
1396 includes the user and process holding the lock, the amount of time
1396 includes the user and process holding the lock, the amount of time
1397 the lock has been held, and the machine name where the process is
1397 the lock has been held, and the machine name where the process is
1398 running if it's not local.
1398 running if it's not local.
1399
1399
1400 Locks protect the integrity of Mercurial's data, so should be
1400 Locks protect the integrity of Mercurial's data, so should be
1401 treated with care. System crashes or other interruptions may cause
1401 treated with care. System crashes or other interruptions may cause
1402 locks to not be properly released, though Mercurial will usually
1402 locks to not be properly released, though Mercurial will usually
1403 detect and remove such stale locks automatically.
1403 detect and remove such stale locks automatically.
1404
1404
1405 However, detecting stale locks may not always be possible (for
1405 However, detecting stale locks may not always be possible (for
1406 instance, on a shared filesystem). Removing locks may also be
1406 instance, on a shared filesystem). Removing locks may also be
1407 blocked by filesystem permissions.
1407 blocked by filesystem permissions.
1408
1408
1409 Setting a lock will prevent other commands from changing the data.
1409 Setting a lock will prevent other commands from changing the data.
1410 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1410 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1411 The set locks are removed when the command exits.
1411 The set locks are removed when the command exits.
1412
1412
1413 Returns 0 if no locks are held.
1413 Returns 0 if no locks are held.
1414
1414
1415 """
1415 """
1416
1416
1417 if opts.get(r'force_lock'):
1417 if opts.get(r'force_lock'):
1418 repo.svfs.unlink('lock')
1418 repo.svfs.unlink('lock')
1419 if opts.get(r'force_wlock'):
1419 if opts.get(r'force_wlock'):
1420 repo.vfs.unlink('wlock')
1420 repo.vfs.unlink('wlock')
1421 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1421 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1422 return 0
1422 return 0
1423
1423
1424 locks = []
1424 locks = []
1425 try:
1425 try:
1426 if opts.get(r'set_wlock'):
1426 if opts.get(r'set_wlock'):
1427 try:
1427 try:
1428 locks.append(repo.wlock(False))
1428 locks.append(repo.wlock(False))
1429 except error.LockHeld:
1429 except error.LockHeld:
1430 raise error.Abort(_('wlock is already held'))
1430 raise error.Abort(_('wlock is already held'))
1431 if opts.get(r'set_lock'):
1431 if opts.get(r'set_lock'):
1432 try:
1432 try:
1433 locks.append(repo.lock(False))
1433 locks.append(repo.lock(False))
1434 except error.LockHeld:
1434 except error.LockHeld:
1435 raise error.Abort(_('lock is already held'))
1435 raise error.Abort(_('lock is already held'))
1436 if len(locks):
1436 if len(locks):
1437 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1437 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1438 return 0
1438 return 0
1439 finally:
1439 finally:
1440 release(*locks)
1440 release(*locks)
1441
1441
1442 now = time.time()
1442 now = time.time()
1443 held = 0
1443 held = 0
1444
1444
1445 def report(vfs, name, method):
1445 def report(vfs, name, method):
1446 # this causes stale locks to get reaped for more accurate reporting
1446 # this causes stale locks to get reaped for more accurate reporting
1447 try:
1447 try:
1448 l = method(False)
1448 l = method(False)
1449 except error.LockHeld:
1449 except error.LockHeld:
1450 l = None
1450 l = None
1451
1451
1452 if l:
1452 if l:
1453 l.release()
1453 l.release()
1454 else:
1454 else:
1455 try:
1455 try:
1456 st = vfs.lstat(name)
1456 st = vfs.lstat(name)
1457 age = now - st[stat.ST_MTIME]
1457 age = now - st[stat.ST_MTIME]
1458 user = util.username(st.st_uid)
1458 user = util.username(st.st_uid)
1459 locker = vfs.readlock(name)
1459 locker = vfs.readlock(name)
1460 if ":" in locker:
1460 if ":" in locker:
1461 host, pid = locker.split(':')
1461 host, pid = locker.split(':')
1462 if host == socket.gethostname():
1462 if host == socket.gethostname():
1463 locker = 'user %s, process %s' % (user, pid)
1463 locker = 'user %s, process %s' % (user, pid)
1464 else:
1464 else:
1465 locker = 'user %s, process %s, host %s' \
1465 locker = 'user %s, process %s, host %s' \
1466 % (user, pid, host)
1466 % (user, pid, host)
1467 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1467 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1468 return 1
1468 return 1
1469 except OSError as e:
1469 except OSError as e:
1470 if e.errno != errno.ENOENT:
1470 if e.errno != errno.ENOENT:
1471 raise
1471 raise
1472
1472
1473 ui.write(("%-6s free\n") % (name + ":"))
1473 ui.write(("%-6s free\n") % (name + ":"))
1474 return 0
1474 return 0
1475
1475
1476 held += report(repo.svfs, "lock", repo.lock)
1476 held += report(repo.svfs, "lock", repo.lock)
1477 held += report(repo.vfs, "wlock", repo.wlock)
1477 held += report(repo.vfs, "wlock", repo.wlock)
1478
1478
1479 return held
1479 return held
1480
1480
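# Editorial note: an illustrative sketch, not part of debugcommands.py.
# report() above probes a lock by trying to take it without waiting:
# success means the lock is free (and reaps a stale lock file as a side
# effect), while LockHeld means another process really holds it and the
# lock file identifies the holder. Reduced to its core (hypothetical
# helper name; ``method`` stands in for repo.lock or repo.wlock):
def _lockisfree(method):
    try:
        l = method(False)      # non-blocking acquisition attempt
    except error.LockHeld:
        return False           # genuinely held by someone else
    l.release()                # we took it, so it was free
    return True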
1481 @command('debugmanifestfulltextcache', [
1481 @command('debugmanifestfulltextcache', [
1482 ('', 'clear', False, _('clear the cache')),
1482 ('', 'clear', False, _('clear the cache')),
1483 ('a', 'add', '', _('add the given manifest node to the cache'),
1483 ('a', 'add', '', _('add the given manifest node to the cache'),
1484 _('NODE'))
1484 _('NODE'))
1485 ], '')
1485 ], '')
1486 def debugmanifestfulltextcache(ui, repo, add=None, **opts):
1486 def debugmanifestfulltextcache(ui, repo, add=None, **opts):
1487 """show, clear or amend the contents of the manifest fulltext cache"""
1487 """show, clear or amend the contents of the manifest fulltext cache"""
1488 with repo.lock():
1488 with repo.lock():
1489 r = repo.manifestlog.getstorage(b'')
1489 r = repo.manifestlog.getstorage(b'')
1490 try:
1490 try:
1491 cache = r._fulltextcache
1491 cache = r._fulltextcache
1492 except AttributeError:
1492 except AttributeError:
1493 ui.warn(_(
1493 ui.warn(_(
1494 "Current revlog implementation doesn't appear to have a "
1494 "Current revlog implementation doesn't appear to have a "
1495 'manifest fulltext cache\n'))
1495 'manifest fulltext cache\n'))
1496 return
1496 return
1497
1497
1498 if opts.get(r'clear'):
1498 if opts.get(r'clear'):
1499 cache.clear()
1499 cache.clear()
1500
1500
1501 if add:
1501 if add:
1502 try:
1502 try:
1503 manifest = repo.manifestlog[r.lookup(add)]
1503 manifest = repo.manifestlog[r.lookup(add)]
1504 except error.LookupError as e:
1504 except error.LookupError as e:
1505 raise error.Abort(e, hint="Check your manifest node id")
1505 raise error.Abort(e, hint="Check your manifest node id")
1506 manifest.read() # stores revision in cache too
1506 manifest.read() # stores revision in cache too
1507
1507
1508 if not len(cache):
1508 if not len(cache):
1509 ui.write(_('Cache empty\n'))
1509 ui.write(_('Cache empty\n'))
1510 else:
1510 else:
1511 ui.write(
1511 ui.write(
1512 _('Cache contains %d manifest entries, in order of most to '
1512 _('Cache contains %d manifest entries, in order of most to '
1513 'least recent:\n') % (len(cache),))
1513 'least recent:\n') % (len(cache),))
1514 totalsize = 0
1514 totalsize = 0
1515 for nodeid in cache:
1515 for nodeid in cache:
1516 # Use cache.get to not update the LRU order
1516 # Use cache.get to not update the LRU order
1517 data = cache.get(nodeid)
1517 data = cache.get(nodeid)
1518 size = len(data)
1518 size = len(data)
1519 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1519 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1520 ui.write(_('id: %s, size %s\n') % (
1520 ui.write(_('id: %s, size %s\n') % (
1521 hex(nodeid), util.bytecount(size)))
1521 hex(nodeid), util.bytecount(size)))
1522 ondisk = cache._opener.stat('manifestfulltextcache').st_size
1522 ondisk = cache._opener.stat('manifestfulltextcache').st_size
1523 ui.write(
1523 ui.write(
1524 _('Total cache data size %s, on-disk %s\n') % (
1524 _('Total cache data size %s, on-disk %s\n') % (
1525 util.bytecount(totalsize), util.bytecount(ondisk))
1525 util.bytecount(totalsize), util.bytecount(ondisk))
1526 )
1526 )
1527
1527
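# Editorial note: an illustrative sketch, not part of debugcommands.py.
# The size report above charges every cache entry its payload length plus
# 24 bytes of on-disk overhead (20-byte nodeid + 4-byte size field), and
# uses cache.get() so the LRU order is left untouched. The hypothetical
# helper below mirrors that accounting:
def _cachedatasize(cache):
    totalsize = 0
    for nodeid in cache:
        totalsize += len(cache.get(nodeid)) + 24
    return totalsize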
1528 @command('debugmergestate', [], '')
1528 @command('debugmergestate', [], '')
1529 def debugmergestate(ui, repo, *args):
1529 def debugmergestate(ui, repo, *args):
1530 """print merge state
1530 """print merge state
1531
1531
1532 Use --verbose to print out information about whether v1 or v2 merge state
1532 Use --verbose to print out information about whether v1 or v2 merge state
1533 was chosen."""
1533 was chosen."""
1534 def _hashornull(h):
1534 def _hashornull(h):
1535 if h == nullhex:
1535 if h == nullhex:
1536 return 'null'
1536 return 'null'
1537 else:
1537 else:
1538 return h
1538 return h
1539
1539
1540 def printrecords(version):
1540 def printrecords(version):
1541 ui.write(('* version %d records\n') % version)
1541 ui.write(('* version %d records\n') % version)
1542 if version == 1:
1542 if version == 1:
1543 records = v1records
1543 records = v1records
1544 else:
1544 else:
1545 records = v2records
1545 records = v2records
1546
1546
1547 for rtype, record in records:
1547 for rtype, record in records:
1548 # pretty print some record types
1548 # pretty print some record types
1549 if rtype == 'L':
1549 if rtype == 'L':
1550 ui.write(('local: %s\n') % record)
1550 ui.write(('local: %s\n') % record)
1551 elif rtype == 'O':
1551 elif rtype == 'O':
1552 ui.write(('other: %s\n') % record)
1552 ui.write(('other: %s\n') % record)
1553 elif rtype == 'm':
1553 elif rtype == 'm':
1554 driver, mdstate = record.split('\0', 1)
1554 driver, mdstate = record.split('\0', 1)
1555 ui.write(('merge driver: %s (state "%s")\n')
1555 ui.write(('merge driver: %s (state "%s")\n')
1556 % (driver, mdstate))
1556 % (driver, mdstate))
1557 elif rtype in 'FDC':
1557 elif rtype in 'FDC':
1558 r = record.split('\0')
1558 r = record.split('\0')
1559 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1559 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1560 if version == 1:
1560 if version == 1:
1561 onode = 'not stored in v1 format'
1561 onode = 'not stored in v1 format'
1562 flags = r[7]
1562 flags = r[7]
1563 else:
1563 else:
1564 onode, flags = r[7:9]
1564 onode, flags = r[7:9]
1565 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1565 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1566 % (f, rtype, state, _hashornull(hash)))
1566 % (f, rtype, state, _hashornull(hash)))
1567 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1567 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1568 ui.write((' ancestor path: %s (node %s)\n')
1568 ui.write((' ancestor path: %s (node %s)\n')
1569 % (afile, _hashornull(anode)))
1569 % (afile, _hashornull(anode)))
1570 ui.write((' other path: %s (node %s)\n')
1570 ui.write((' other path: %s (node %s)\n')
1571 % (ofile, _hashornull(onode)))
1571 % (ofile, _hashornull(onode)))
1572 elif rtype == 'f':
1572 elif rtype == 'f':
1573 filename, rawextras = record.split('\0', 1)
1573 filename, rawextras = record.split('\0', 1)
1574 extras = rawextras.split('\0')
1574 extras = rawextras.split('\0')
1575 i = 0
1575 i = 0
1576 extrastrings = []
1576 extrastrings = []
1577 while i < len(extras):
1577 while i < len(extras):
1578 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1578 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1579 i += 2
1579 i += 2
1580
1580
1581 ui.write(('file extras: %s (%s)\n')
1581 ui.write(('file extras: %s (%s)\n')
1582 % (filename, ', '.join(extrastrings)))
1582 % (filename, ', '.join(extrastrings)))
1583 elif rtype == 'l':
1583 elif rtype == 'l':
1584 labels = record.split('\0', 2)
1584 labels = record.split('\0', 2)
1585 labels = [l for l in labels if len(l) > 0]
1585 labels = [l for l in labels if len(l) > 0]
1586 ui.write(('labels:\n'))
1586 ui.write(('labels:\n'))
1587 ui.write((' local: %s\n' % labels[0]))
1587 ui.write((' local: %s\n' % labels[0]))
1588 ui.write((' other: %s\n' % labels[1]))
1588 ui.write((' other: %s\n' % labels[1]))
1589 if len(labels) > 2:
1589 if len(labels) > 2:
1590 ui.write((' base: %s\n' % labels[2]))
1590 ui.write((' base: %s\n' % labels[2]))
1591 else:
1591 else:
1592 ui.write(('unrecognized entry: %s\t%s\n')
1592 ui.write(('unrecognized entry: %s\t%s\n')
1593 % (rtype, record.replace('\0', '\t')))
1593 % (rtype, record.replace('\0', '\t')))
1594
1594
1595 # Avoid mergestate.read() since it may raise an exception for unsupported
1595 # Avoid mergestate.read() since it may raise an exception for unsupported
1596 # merge state records. We shouldn't be doing this, but this is OK since this
1596 # merge state records. We shouldn't be doing this, but this is OK since this
1597 # command is pretty low-level.
1597 # command is pretty low-level.
1598 ms = mergemod.mergestate(repo)
1598 ms = mergemod.mergestate(repo)
1599
1599
1600 # sort so that reasonable information is on top
1600 # sort so that reasonable information is on top
1601 v1records = ms._readrecordsv1()
1601 v1records = ms._readrecordsv1()
1602 v2records = ms._readrecordsv2()
1602 v2records = ms._readrecordsv2()
1603 order = 'LOml'
1603 order = 'LOml'
1604 def key(r):
1604 def key(r):
1605 idx = order.find(r[0])
1605 idx = order.find(r[0])
1606 if idx == -1:
1606 if idx == -1:
1607 return (1, r[1])
1607 return (1, r[1])
1608 else:
1608 else:
1609 return (0, idx)
1609 return (0, idx)
1610 v1records.sort(key=key)
1610 v1records.sort(key=key)
1611 v2records.sort(key=key)
1611 v2records.sort(key=key)
1612
1612
1613 if not v1records and not v2records:
1613 if not v1records and not v2records:
1614 ui.write(('no merge state found\n'))
1614 ui.write(('no merge state found\n'))
1615 elif not v2records:
1615 elif not v2records:
1616 ui.note(('no version 2 merge state\n'))
1616 ui.note(('no version 2 merge state\n'))
1617 printrecords(1)
1617 printrecords(1)
1618 elif ms._v1v2match(v1records, v2records):
1618 elif ms._v1v2match(v1records, v2records):
1619 ui.note(('v1 and v2 states match: using v2\n'))
1619 ui.note(('v1 and v2 states match: using v2\n'))
1620 printrecords(2)
1620 printrecords(2)
1621 else:
1621 else:
1622 ui.note(('v1 and v2 states mismatch: using v1\n'))
1622 ui.note(('v1 and v2 states mismatch: using v1\n'))
1623 printrecords(1)
1623 printrecords(1)
1624 if ui.verbose:
1624 if ui.verbose:
1625 printrecords(2)
1625 printrecords(2)
1626
1626
1627 @command('debugnamecomplete', [], _('NAME...'))
1627 @command('debugnamecomplete', [], _('NAME...'))
1628 def debugnamecomplete(ui, repo, *args):
1628 def debugnamecomplete(ui, repo, *args):
1629 '''complete "names" - tags, open branch names, bookmark names'''
1629 '''complete "names" - tags, open branch names, bookmark names'''
1630
1630
1631 names = set()
1631 names = set()
1632 # since we previously only listed open branches, we will handle that
1632 # since we previously only listed open branches, we will handle that
1633 # specially (after this for loop)
1633 # specially (after this for loop)
1634 for name, ns in repo.names.iteritems():
1634 for name, ns in repo.names.iteritems():
1635 if name != 'branches':
1635 if name != 'branches':
1636 names.update(ns.listnames(repo))
1636 names.update(ns.listnames(repo))
1637 names.update(tag for (tag, heads, tip, closed)
1637 names.update(tag for (tag, heads, tip, closed)
1638 in repo.branchmap().iterbranches() if not closed)
1638 in repo.branchmap().iterbranches() if not closed)
1639 completions = set()
1639 completions = set()
1640 if not args:
1640 if not args:
1641 args = ['']
1641 args = ['']
1642 for a in args:
1642 for a in args:
1643 completions.update(n for n in names if n.startswith(a))
1643 completions.update(n for n in names if n.startswith(a))
1644 ui.write('\n'.join(sorted(completions)))
1644 ui.write('\n'.join(sorted(completions)))
1645 ui.write('\n')
1645 ui.write('\n')
1646
1646
1647 @command('debugobsolete',
1647 @command('debugobsolete',
1648 [('', 'flags', 0, _('marker flags')),
1648 [('', 'flags', 0, _('marker flags')),
1649 ('', 'record-parents', False,
1649 ('', 'record-parents', False,
1650 _('record parent information for the precursor')),
1650 _('record parent information for the precursor')),
1651 ('r', 'rev', [], _('display markers relevant to REV')),
1651 ('r', 'rev', [], _('display markers relevant to REV')),
1652 ('', 'exclusive', False, _('restrict display to markers only '
1652 ('', 'exclusive', False, _('restrict display to markers only '
1653 'relevant to REV')),
1653 'relevant to REV')),
1654 ('', 'index', False, _('display index of the marker')),
1654 ('', 'index', False, _('display index of the marker')),
1655 ('', 'delete', [], _('delete markers specified by indices')),
1655 ('', 'delete', [], _('delete markers specified by indices')),
1656 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1656 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1657 _('[OBSOLETED [REPLACEMENT ...]]'))
1657 _('[OBSOLETED [REPLACEMENT ...]]'))
1658 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1658 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1659 """create arbitrary obsolete marker
1659 """create arbitrary obsolete marker
1660
1660
1661 With no arguments, displays the list of obsolescence markers."""
1661 With no arguments, displays the list of obsolescence markers."""
1662
1662
1663 opts = pycompat.byteskwargs(opts)
1663 opts = pycompat.byteskwargs(opts)
1664
1664
1665 def parsenodeid(s):
1665 def parsenodeid(s):
1666 try:
1666 try:
1667 # We do not use revsingle/revrange functions here to accept
1667 # We do not use revsingle/revrange functions here to accept
1668 # arbitrary node identifiers, possibly not present in the
1668 # arbitrary node identifiers, possibly not present in the
1669 # local repository.
1669 # local repository.
1670 n = bin(s)
1670 n = bin(s)
1671 if len(n) != len(nullid):
1671 if len(n) != len(nullid):
1672 raise TypeError()
1672 raise TypeError()
1673 return n
1673 return n
1674 except TypeError:
1674 except TypeError:
1675 raise error.Abort('changeset references must be full hexadecimal '
1675 raise error.Abort('changeset references must be full hexadecimal '
1676 'node identifiers')
1676 'node identifiers')
1677
1677
1678 if opts.get('delete'):
1678 if opts.get('delete'):
1679 indices = []
1679 indices = []
1680 for v in opts.get('delete'):
1680 for v in opts.get('delete'):
1681 try:
1681 try:
1682 indices.append(int(v))
1682 indices.append(int(v))
1683 except ValueError:
1683 except ValueError:
1684 raise error.Abort(_('invalid index value: %r') % v,
1684 raise error.Abort(_('invalid index value: %r') % v,
1685 hint=_('use integers for indices'))
1685 hint=_('use integers for indices'))
1686
1686
1687 if repo.currenttransaction():
1687 if repo.currenttransaction():
1688 raise error.Abort(_('cannot delete obsmarkers in the middle '
1688 raise error.Abort(_('cannot delete obsmarkers in the middle '
1689 'of a transaction.'))
1689 'of a transaction.'))
1690
1690
1691 with repo.lock():
1691 with repo.lock():
1692 n = repair.deleteobsmarkers(repo.obsstore, indices)
1692 n = repair.deleteobsmarkers(repo.obsstore, indices)
1693 ui.write(_('deleted %i obsolescence markers\n') % n)
1693 ui.write(_('deleted %i obsolescence markers\n') % n)
1694
1694
1695 return
1695 return
1696
1696
1697 if precursor is not None:
1697 if precursor is not None:
1698 if opts['rev']:
1698 if opts['rev']:
1699 raise error.Abort('cannot select revision when creating marker')
1699 raise error.Abort('cannot select revision when creating marker')
1700 metadata = {}
1700 metadata = {}
1701 metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
1701 metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
1702 succs = tuple(parsenodeid(succ) for succ in successors)
1702 succs = tuple(parsenodeid(succ) for succ in successors)
1703 l = repo.lock()
1703 l = repo.lock()
1704 try:
1704 try:
1705 tr = repo.transaction('debugobsolete')
1705 tr = repo.transaction('debugobsolete')
1706 try:
1706 try:
1707 date = opts.get('date')
1707 date = opts.get('date')
1708 if date:
1708 if date:
1709 date = dateutil.parsedate(date)
1709 date = dateutil.parsedate(date)
1710 else:
1710 else:
1711 date = None
1711 date = None
1712 prec = parsenodeid(precursor)
1712 prec = parsenodeid(precursor)
1713 parents = None
1713 parents = None
1714 if opts['record_parents']:
1714 if opts['record_parents']:
1715 if prec not in repo.unfiltered():
1715 if prec not in repo.unfiltered():
1716 raise error.Abort('cannot use --record-parents on '
1716 raise error.Abort('cannot use --record-parents on '
1717 'unknown changesets')
1717 'unknown changesets')
1718 parents = repo.unfiltered()[prec].parents()
1718 parents = repo.unfiltered()[prec].parents()
1719 parents = tuple(p.node() for p in parents)
1719 parents = tuple(p.node() for p in parents)
1720 repo.obsstore.create(tr, prec, succs, opts['flags'],
1720 repo.obsstore.create(tr, prec, succs, opts['flags'],
1721 parents=parents, date=date,
1721 parents=parents, date=date,
1722 metadata=metadata, ui=ui)
1722 metadata=metadata, ui=ui)
1723 tr.close()
1723 tr.close()
1724 except ValueError as exc:
1724 except ValueError as exc:
1725 raise error.Abort(_('bad obsmarker input: %s') %
1725 raise error.Abort(_('bad obsmarker input: %s') %
1726 pycompat.bytestr(exc))
1726 pycompat.bytestr(exc))
1727 finally:
1727 finally:
1728 tr.release()
1728 tr.release()
1729 finally:
1729 finally:
1730 l.release()
1730 l.release()
1731 else:
1731 else:
1732 if opts['rev']:
1732 if opts['rev']:
1733 revs = scmutil.revrange(repo, opts['rev'])
1733 revs = scmutil.revrange(repo, opts['rev'])
1734 nodes = [repo[r].node() for r in revs]
1734 nodes = [repo[r].node() for r in revs]
1735 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1735 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1736 exclusive=opts['exclusive']))
1736 exclusive=opts['exclusive']))
1737 markers.sort(key=lambda x: x._data)
1737 markers.sort(key=lambda x: x._data)
1738 else:
1738 else:
1739 markers = obsutil.getmarkers(repo)
1739 markers = obsutil.getmarkers(repo)
1740
1740
1741 markerstoiter = markers
1741 markerstoiter = markers
1742 isrelevant = lambda m: True
1742 isrelevant = lambda m: True
1743 if opts.get('rev') and opts.get('index'):
1743 if opts.get('rev') and opts.get('index'):
1744 markerstoiter = obsutil.getmarkers(repo)
1744 markerstoiter = obsutil.getmarkers(repo)
1745 markerset = set(markers)
1745 markerset = set(markers)
1746 isrelevant = lambda m: m in markerset
1746 isrelevant = lambda m: m in markerset
1747
1747
1748 fm = ui.formatter('debugobsolete', opts)
1748 fm = ui.formatter('debugobsolete', opts)
1749 for i, m in enumerate(markerstoiter):
1749 for i, m in enumerate(markerstoiter):
1750 if not isrelevant(m):
1750 if not isrelevant(m):
1751 # a marker can be irrelevant when we're iterating over a set
1751 # a marker can be irrelevant when we're iterating over a set
1752 # of markers (markerstoiter) which is bigger than the set
1752 # of markers (markerstoiter) which is bigger than the set
1753 # of markers we want to display (markers);
1753 # of markers we want to display (markers);
1754 # this can happen if both --index and --rev options are
1754 # this can happen if both --index and --rev options are
1755 # provided and thus we need to iterate over all of the markers
1755 # provided and thus we need to iterate over all of the markers
1756 # to get the correct indices, but only display the ones that
1756 # to get the correct indices, but only display the ones that
1757 # are relevant to the --rev value.
1757 # are relevant to the --rev value.
1758 continue
1758 continue
1759 fm.startitem()
1759 fm.startitem()
1760 ind = i if opts.get('index') else None
1760 ind = i if opts.get('index') else None
1761 cmdutil.showmarker(fm, m, index=ind)
1761 cmdutil.showmarker(fm, m, index=ind)
1762 fm.end()
1762 fm.end()
1763
1763
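The marker-creation branch above always runs the same sequence: take the repository lock, open a transaction, write the marker through the obsstore, close the transaction, and release it on the way out. Below is a minimal sketch of that pattern, not part of the command; the helper name is invented, and it reuses only the calls visible above (repo.lock(), repo.transaction(), repo.obsstore.create(), tr.close()/tr.release()).

def createmarker(repo, ui, prec, succs, flags=0, metadata=None):
    # Hypothetical helper: mirrors the lock/transaction pattern used by
    # debugobsolete when it records a new obsolescence marker.
    with repo.lock():
        tr = repo.transaction('debugobsolete')
        try:
            repo.obsstore.create(tr, prec, tuple(succs), flags,
                                 metadata=metadata or {}, ui=ui)
            tr.close()
        finally:
            tr.release()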
1764 @command('debugpathcomplete',
1764 @command('debugpathcomplete',
1765 [('f', 'full', None, _('complete an entire path')),
1765 [('f', 'full', None, _('complete an entire path')),
1766 ('n', 'normal', None, _('show only normal files')),
1766 ('n', 'normal', None, _('show only normal files')),
1767 ('a', 'added', None, _('show only added files')),
1767 ('a', 'added', None, _('show only added files')),
1768 ('r', 'removed', None, _('show only removed files'))],
1768 ('r', 'removed', None, _('show only removed files'))],
1769 _('FILESPEC...'))
1769 _('FILESPEC...'))
1770 def debugpathcomplete(ui, repo, *specs, **opts):
1770 def debugpathcomplete(ui, repo, *specs, **opts):
1771 '''complete part or all of a tracked path
1771 '''complete part or all of a tracked path
1772
1772
1773 This command supports shells that offer path name completion. It
1773 This command supports shells that offer path name completion. It
1774 currently completes only files already known to the dirstate.
1774 currently completes only files already known to the dirstate.
1775
1775
1776 Completion extends only to the next path segment unless
1776 Completion extends only to the next path segment unless
1777 --full is specified, in which case entire paths are used.'''
1777 --full is specified, in which case entire paths are used.'''
1778
1778
1779 def complete(path, acceptable):
1779 def complete(path, acceptable):
1780 dirstate = repo.dirstate
1780 dirstate = repo.dirstate
1781 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1781 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1782 rootdir = repo.root + pycompat.ossep
1782 rootdir = repo.root + pycompat.ossep
1783 if spec != repo.root and not spec.startswith(rootdir):
1783 if spec != repo.root and not spec.startswith(rootdir):
1784 return [], []
1784 return [], []
1785 if os.path.isdir(spec):
1785 if os.path.isdir(spec):
1786 spec += '/'
1786 spec += '/'
1787 spec = spec[len(rootdir):]
1787 spec = spec[len(rootdir):]
1788 fixpaths = pycompat.ossep != '/'
1788 fixpaths = pycompat.ossep != '/'
1789 if fixpaths:
1789 if fixpaths:
1790 spec = spec.replace(pycompat.ossep, '/')
1790 spec = spec.replace(pycompat.ossep, '/')
1791 speclen = len(spec)
1791 speclen = len(spec)
1792 fullpaths = opts[r'full']
1792 fullpaths = opts[r'full']
1793 files, dirs = set(), set()
1793 files, dirs = set(), set()
1794 adddir, addfile = dirs.add, files.add
1794 adddir, addfile = dirs.add, files.add
1795 for f, st in dirstate.iteritems():
1795 for f, st in dirstate.iteritems():
1796 if f.startswith(spec) and st[0] in acceptable:
1796 if f.startswith(spec) and st[0] in acceptable:
1797 if fixpaths:
1797 if fixpaths:
1798 f = f.replace('/', pycompat.ossep)
1798 f = f.replace('/', pycompat.ossep)
1799 if fullpaths:
1799 if fullpaths:
1800 addfile(f)
1800 addfile(f)
1801 continue
1801 continue
1802 s = f.find(pycompat.ossep, speclen)
1802 s = f.find(pycompat.ossep, speclen)
1803 if s >= 0:
1803 if s >= 0:
1804 adddir(f[:s])
1804 adddir(f[:s])
1805 else:
1805 else:
1806 addfile(f)
1806 addfile(f)
1807 return files, dirs
1807 return files, dirs
1808
1808
1809 acceptable = ''
1809 acceptable = ''
1810 if opts[r'normal']:
1810 if opts[r'normal']:
1811 acceptable += 'nm'
1811 acceptable += 'nm'
1812 if opts[r'added']:
1812 if opts[r'added']:
1813 acceptable += 'a'
1813 acceptable += 'a'
1814 if opts[r'removed']:
1814 if opts[r'removed']:
1815 acceptable += 'r'
1815 acceptable += 'r'
1816 cwd = repo.getcwd()
1816 cwd = repo.getcwd()
1817 if not specs:
1817 if not specs:
1818 specs = ['.']
1818 specs = ['.']
1819
1819
1820 files, dirs = set(), set()
1820 files, dirs = set(), set()
1821 for spec in specs:
1821 for spec in specs:
1822 f, d = complete(spec, acceptable or 'nmar')
1822 f, d = complete(spec, acceptable or 'nmar')
1823 files.update(f)
1823 files.update(f)
1824 dirs.update(d)
1824 dirs.update(d)
1825 files.update(dirs)
1825 files.update(dirs)
1826 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1826 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1827 ui.write('\n')
1827 ui.write('\n')
1828
1828
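At its core, complete() above is a prefix scan: keep dirstate entries whose status is acceptable, then cut each match at the next path separator unless --full was given. A stand-alone illustration of that truncation, using plain strings instead of a dirstate (the function name and sample paths are made up):

def nextsegment(paths, prefix, full=False, sep='/'):
    # Completes one path segment at a time, like complete() above.
    files, dirs = set(), set()
    plen = len(prefix)
    for f in paths:
        if not f.startswith(prefix):
            continue
        if full:
            files.add(f)
            continue
        s = f.find(sep, plen)
        if s >= 0:
            dirs.add(f[:s])
        else:
            files.add(f)
    return sorted(files | dirs)

# nextsegment(['src/a.py', 'src/sub/b.py', 'README'], 'src/')
# -> ['src/a.py', 'src/sub']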
1829 @command('debugpeer', [], _('PATH'), norepo=True)
1829 @command('debugpeer', [], _('PATH'), norepo=True)
1830 def debugpeer(ui, path):
1830 def debugpeer(ui, path):
1831 """establish a connection to a peer repository"""
1831 """establish a connection to a peer repository"""
1832 # Always enable peer request logging. Requires --debug to display
1832 # Always enable peer request logging. Requires --debug to display
1833 # though.
1833 # though.
1834 overrides = {
1834 overrides = {
1835 ('devel', 'debug.peer-request'): True,
1835 ('devel', 'debug.peer-request'): True,
1836 }
1836 }
1837
1837
1838 with ui.configoverride(overrides):
1838 with ui.configoverride(overrides):
1839 peer = hg.peer(ui, {}, path)
1839 peer = hg.peer(ui, {}, path)
1840
1840
1841 local = peer.local() is not None
1841 local = peer.local() is not None
1842 canpush = peer.canpush()
1842 canpush = peer.canpush()
1843
1843
1844 ui.write(_('url: %s\n') % peer.url())
1844 ui.write(_('url: %s\n') % peer.url())
1845 ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
1845 ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
1846 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
1846 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
1847
1847
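The same probe can be reproduced outside the command. A small sketch, assuming a regular Mercurial ui object and reusing only the calls shown above (hg.peer(), peer.url(), peer.local(), peer.canpush()); the helper name is invented:

from mercurial import hg

def probepeer(ui, path):
    # Enable peer request logging, as debugpeer does, then summarize the
    # peer's capabilities in a plain dictionary.
    overrides = {('devel', 'debug.peer-request'): True}
    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)
    return {
        'url': peer.url(),
        'local': peer.local() is not None,
        'pushable': peer.canpush(),
    }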
1848 @command('debugpickmergetool',
1848 @command('debugpickmergetool',
1849 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1849 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1850 ('', 'changedelete', None, _('emulate merging change and delete')),
1850 ('', 'changedelete', None, _('emulate merging change and delete')),
1851 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1851 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1852 _('[PATTERN]...'),
1852 _('[PATTERN]...'),
1853 inferrepo=True)
1853 inferrepo=True)
1854 def debugpickmergetool(ui, repo, *pats, **opts):
1854 def debugpickmergetool(ui, repo, *pats, **opts):
1855 """examine which merge tool is chosen for specified file
1855 """examine which merge tool is chosen for specified file
1856
1856
1857 As described in :hg:`help merge-tools`, Mercurial examines
1857 As described in :hg:`help merge-tools`, Mercurial examines
1858 configurations below in this order to decide which merge tool is
1858 configurations below in this order to decide which merge tool is
1859 chosen for specified file.
1859 chosen for specified file.
1860
1860
1861 1. ``--tool`` option
1861 1. ``--tool`` option
1862 2. ``HGMERGE`` environment variable
1862 2. ``HGMERGE`` environment variable
1863 3. configurations in ``merge-patterns`` section
1863 3. configurations in ``merge-patterns`` section
1864 4. configuration of ``ui.merge``
1864 4. configuration of ``ui.merge``
1865 5. configurations in ``merge-tools`` section
1865 5. configurations in ``merge-tools`` section
1866 6. ``hgmerge`` tool (for historical reasons only)
1866 6. ``hgmerge`` tool (for historical reasons only)
1867 7. default tool for fallback (``:merge`` or ``:prompt``)
1867 7. default tool for fallback (``:merge`` or ``:prompt``)
1868
1868
1869 This command writes out examination result in the style below::
1869 This command writes out examination result in the style below::
1870
1870
1871 FILE = MERGETOOL
1871 FILE = MERGETOOL
1872
1872
1873 By default, all files known in the first parent context of the
1873 By default, all files known in the first parent context of the
1874 working directory are examined. Use file patterns and/or -I/-X
1874 working directory are examined. Use file patterns and/or -I/-X
1875 options to limit target files. -r/--rev is also useful to examine
1875 options to limit target files. -r/--rev is also useful to examine
1876 files in another context without actually updating to it.
1876 files in another context without actually updating to it.
1877
1877
1878 With --debug, this command shows warning messages while matching
1878 With --debug, this command shows warning messages while matching
1879 against ``merge-patterns`` and so on, too. It is recommended to
1879 against ``merge-patterns`` and so on, too. It is recommended to
1880 use this option with explicit file patterns and/or -I/-X options,
1880 use this option with explicit file patterns and/or -I/-X options,
1881 because this option increases the amount of output per file according
1881 because this option increases the amount of output per file according
1882 to configurations in hgrc.
1882 to configurations in hgrc.
1883
1883
1884 With -v/--verbose, this command first shows the configurations below
1884 With -v/--verbose, this command first shows the configurations below
1885 (only if they are specified).
1885 (only if they are specified).
1886
1886
1887 - ``--tool`` option
1887 - ``--tool`` option
1888 - ``HGMERGE`` environment variable
1888 - ``HGMERGE`` environment variable
1889 - configuration of ``ui.merge``
1889 - configuration of ``ui.merge``
1890
1890
1891 If merge tool is chosen before matching against
1891 If merge tool is chosen before matching against
1892 ``merge-patterns``, this command can't show any helpful
1892 ``merge-patterns``, this command can't show any helpful
1893 information, even with --debug. In such a case, the information above is
1893 information, even with --debug. In such a case, the information above is
1894 useful to know why a merge tool is chosen.
1894 useful to know why a merge tool is chosen.
1895 """
1895 """
1896 opts = pycompat.byteskwargs(opts)
1896 opts = pycompat.byteskwargs(opts)
1897 overrides = {}
1897 overrides = {}
1898 if opts['tool']:
1898 if opts['tool']:
1899 overrides[('ui', 'forcemerge')] = opts['tool']
1899 overrides[('ui', 'forcemerge')] = opts['tool']
1900 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1900 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1901
1901
1902 with ui.configoverride(overrides, 'debugmergepatterns'):
1902 with ui.configoverride(overrides, 'debugmergepatterns'):
1903 hgmerge = encoding.environ.get("HGMERGE")
1903 hgmerge = encoding.environ.get("HGMERGE")
1904 if hgmerge is not None:
1904 if hgmerge is not None:
1905 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1905 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1906 uimerge = ui.config("ui", "merge")
1906 uimerge = ui.config("ui", "merge")
1907 if uimerge:
1907 if uimerge:
1908 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1908 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1909
1909
1910 ctx = scmutil.revsingle(repo, opts.get('rev'))
1910 ctx = scmutil.revsingle(repo, opts.get('rev'))
1911 m = scmutil.match(ctx, pats, opts)
1911 m = scmutil.match(ctx, pats, opts)
1912 changedelete = opts['changedelete']
1912 changedelete = opts['changedelete']
1913 for path in ctx.walk(m):
1913 for path in ctx.walk(m):
1914 fctx = ctx[path]
1914 fctx = ctx[path]
1915 try:
1915 try:
1916 if not ui.debugflag:
1916 if not ui.debugflag:
1917 ui.pushbuffer(error=True)
1917 ui.pushbuffer(error=True)
1918 tool, toolpath = filemerge._picktool(repo, ui, path,
1918 tool, toolpath = filemerge._picktool(repo, ui, path,
1919 fctx.isbinary(),
1919 fctx.isbinary(),
1920 'l' in fctx.flags(),
1920 'l' in fctx.flags(),
1921 changedelete)
1921 changedelete)
1922 finally:
1922 finally:
1923 if not ui.debugflag:
1923 if not ui.debugflag:
1924 ui.popbuffer()
1924 ui.popbuffer()
1925 ui.write(('%s = %s\n') % (path, tool))
1925 ui.write(('%s = %s\n') % (path, tool))
1926
1926
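The seven-step precedence listed in the docstring amounts to "the first configured source wins". A toy, self-contained illustration of that selection order (this is not Mercurial code, and the names are arbitrary):

def firstconfigured(*candidates):
    # Each candidate is a (source name, configured value) pair, ordered
    # from highest precedence (--tool) to lowest; empty values are skipped.
    for name, value in candidates:
        if value:
            return name, value
    return 'fallback', ':merge'

# firstconfigured(('--tool', ''), ('HGMERGE', 'vimdiff'), ('ui.merge', 'meld'))
# -> ('HGMERGE', 'vimdiff')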
1927 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1927 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1928 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1928 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1929 '''access the pushkey key/value protocol
1929 '''access the pushkey key/value protocol
1930
1930
1931 With two args, list the keys in the given namespace.
1931 With two args, list the keys in the given namespace.
1932
1932
1933 With five args, set a key to new if it currently is set to old.
1933 With five args, set a key to new if it currently is set to old.
1934 Reports success or failure.
1934 Reports success or failure.
1935 '''
1935 '''
1936
1936
1937 target = hg.peer(ui, {}, repopath)
1937 target = hg.peer(ui, {}, repopath)
1938 if keyinfo:
1938 if keyinfo:
1939 key, old, new = keyinfo
1939 key, old, new = keyinfo
1940 with target.commandexecutor() as e:
1940 with target.commandexecutor() as e:
1941 r = e.callcommand('pushkey', {
1941 r = e.callcommand('pushkey', {
1942 'namespace': namespace,
1942 'namespace': namespace,
1943 'key': key,
1943 'key': key,
1944 'old': old,
1944 'old': old,
1945 'new': new,
1945 'new': new,
1946 }).result()
1946 }).result()
1947
1947
1948 ui.status(pycompat.bytestr(r) + '\n')
1948 ui.status(pycompat.bytestr(r) + '\n')
1949 return not r
1949 return not r
1950 else:
1950 else:
1951 for k, v in sorted(target.listkeys(namespace).iteritems()):
1951 for k, v in sorted(target.listkeys(namespace).iteritems()):
1952 ui.write("%s\t%s\n" % (stringutil.escapestr(k),
1952 ui.write("%s\t%s\n" % (stringutil.escapestr(k),
1953 stringutil.escapestr(v)))
1953 stringutil.escapestr(v)))
1954
1954
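Updating a key uses the commandexecutor pattern shown above. A minimal sketch with an invented helper name, assuming `peer` was obtained from hg.peer() exactly as debugpushkey does:

def setkey(peer, namespace, key, old, new):
    # Runs the same 'pushkey' wire command as debugpushkey and reports
    # whether the server accepted the update.
    with peer.commandexecutor() as e:
        r = e.callcommand('pushkey', {
            'namespace': namespace,
            'key': key,
            'old': old,
            'new': new,
        }).result()
    return bool(r)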
1955 @command('debugpvec', [], _('A B'))
1955 @command('debugpvec', [], _('A B'))
1956 def debugpvec(ui, repo, a, b=None):
1956 def debugpvec(ui, repo, a, b=None):
1957 ca = scmutil.revsingle(repo, a)
1957 ca = scmutil.revsingle(repo, a)
1958 cb = scmutil.revsingle(repo, b)
1958 cb = scmutil.revsingle(repo, b)
1959 pa = pvec.ctxpvec(ca)
1959 pa = pvec.ctxpvec(ca)
1960 pb = pvec.ctxpvec(cb)
1960 pb = pvec.ctxpvec(cb)
1961 if pa == pb:
1961 if pa == pb:
1962 rel = "="
1962 rel = "="
1963 elif pa > pb:
1963 elif pa > pb:
1964 rel = ">"
1964 rel = ">"
1965 elif pa < pb:
1965 elif pa < pb:
1966 rel = "<"
1966 rel = "<"
1967 elif pa | pb:
1967 elif pa | pb:
1968 rel = "|"
1968 rel = "|"
1969 ui.write(_("a: %s\n") % pa)
1969 ui.write(_("a: %s\n") % pa)
1970 ui.write(_("b: %s\n") % pb)
1970 ui.write(_("b: %s\n") % pb)
1971 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1971 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1972 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1972 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1973 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1973 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1974 pa.distance(pb), rel))
1974 pa.distance(pb), rel))
1975
1975
1976 @command('debugrebuilddirstate|debugrebuildstate',
1976 @command('debugrebuilddirstate|debugrebuildstate',
1977 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1977 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1978 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1978 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1979 'the working copy parent')),
1979 'the working copy parent')),
1980 ],
1980 ],
1981 _('[-r REV]'))
1981 _('[-r REV]'))
1982 def debugrebuilddirstate(ui, repo, rev, **opts):
1982 def debugrebuilddirstate(ui, repo, rev, **opts):
1983 """rebuild the dirstate as it would look like for the given revision
1983 """rebuild the dirstate as it would look like for the given revision
1984
1984
1985 If no revision is specified the first current parent will be used.
1985 If no revision is specified the first current parent will be used.
1986
1986
1987 The dirstate will be set to the files of the given revision.
1987 The dirstate will be set to the files of the given revision.
1988 The actual working directory content or existing dirstate
1988 The actual working directory content or existing dirstate
1989 information such as adds or removes is not considered.
1989 information such as adds or removes is not considered.
1990
1990
1991 ``minimal`` will only rebuild the dirstate status for files that claim to be
1991 ``minimal`` will only rebuild the dirstate status for files that claim to be
1992 tracked but are not in the parent manifest, or that exist in the parent
1992 tracked but are not in the parent manifest, or that exist in the parent
1993 manifest but are not in the dirstate. It will not change adds, removes, or
1993 manifest but are not in the dirstate. It will not change adds, removes, or
1994 modified files that are in the working copy parent.
1994 modified files that are in the working copy parent.
1995
1995
1996 One use of this command is to make the next :hg:`status` invocation
1996 One use of this command is to make the next :hg:`status` invocation
1997 check the actual file content.
1997 check the actual file content.
1998 """
1998 """
1999 ctx = scmutil.revsingle(repo, rev)
1999 ctx = scmutil.revsingle(repo, rev)
2000 with repo.wlock():
2000 with repo.wlock():
2001 dirstate = repo.dirstate
2001 dirstate = repo.dirstate
2002 changedfiles = None
2002 changedfiles = None
2003 # See command doc for what minimal does.
2003 # See command doc for what minimal does.
2004 if opts.get(r'minimal'):
2004 if opts.get(r'minimal'):
2005 manifestfiles = set(ctx.manifest().keys())
2005 manifestfiles = set(ctx.manifest().keys())
2006 dirstatefiles = set(dirstate)
2006 dirstatefiles = set(dirstate)
2007 manifestonly = manifestfiles - dirstatefiles
2007 manifestonly = manifestfiles - dirstatefiles
2008 dsonly = dirstatefiles - manifestfiles
2008 dsonly = dirstatefiles - manifestfiles
2009 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
2009 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
2010 changedfiles = manifestonly | dsnotadded
2010 changedfiles = manifestonly | dsnotadded
2011
2011
2012 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2012 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2013
2013
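The --minimal branch above is plain set arithmetic over the manifest and the dirstate. A self-contained sketch of that computation, using sets of paths instead of real manifest/dirstate objects (the helper name is made up):

def minimalchanged(manifestfiles, dirstatefiles, added):
    # Files to rebuild: in the manifest but unknown to the dirstate, or
    # tracked by the dirstate but neither in the manifest nor freshly added.
    manifestonly = manifestfiles - dirstatefiles
    dsnotadded = (dirstatefiles - manifestfiles) - added
    return manifestonly | dsnotadded

# minimalchanged({'a', 'b'}, {'b', 'c', 'd'}, added={'d'}) -> {'a', 'c'}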
2014 @command('debugrebuildfncache', [], '')
2014 @command('debugrebuildfncache', [], '')
2015 def debugrebuildfncache(ui, repo):
2015 def debugrebuildfncache(ui, repo):
2016 """rebuild the fncache file"""
2016 """rebuild the fncache file"""
2017 repair.rebuildfncache(ui, repo)
2017 repair.rebuildfncache(ui, repo)
2018
2018
2019 @command('debugrename',
2019 @command('debugrename',
2020 [('r', 'rev', '', _('revision to debug'), _('REV'))],
2020 [('r', 'rev', '', _('revision to debug'), _('REV'))],
2021 _('[-r REV] FILE'))
2021 _('[-r REV] FILE'))
2022 def debugrename(ui, repo, file1, *pats, **opts):
2022 def debugrename(ui, repo, file1, *pats, **opts):
2023 """dump rename information"""
2023 """dump rename information"""
2024
2024
2025 opts = pycompat.byteskwargs(opts)
2025 opts = pycompat.byteskwargs(opts)
2026 ctx = scmutil.revsingle(repo, opts.get('rev'))
2026 ctx = scmutil.revsingle(repo, opts.get('rev'))
2027 m = scmutil.match(ctx, (file1,) + pats, opts)
2027 m = scmutil.match(ctx, (file1,) + pats, opts)
2028 for abs in ctx.walk(m):
2028 for abs in ctx.walk(m):
2029 fctx = ctx[abs]
2029 fctx = ctx[abs]
2030 o = fctx.filelog().renamed(fctx.filenode())
2030 o = fctx.filelog().renamed(fctx.filenode())
2031 rel = m.rel(abs)
2031 rel = m.rel(abs)
2032 if o:
2032 if o:
2033 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2033 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2034 else:
2034 else:
2035 ui.write(_("%s not renamed\n") % rel)
2035 ui.write(_("%s not renamed\n") % rel)
2036
2036
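The per-file check above relies on filelog().renamed(), which returns a (source path, source filenode) pair or a false value. A small sketch wrapping that call (the helper name is invented; hex comes from mercurial.node, as in this module's imports):

from mercurial.node import hex

def renameinfo(fctx):
    # Summarize whether a file context was recorded as a copy/rename.
    o = fctx.filelog().renamed(fctx.filenode())
    if o:
        return {'renamed': True, 'source': o[0], 'sourcenode': hex(o[1])}
    return {'renamed': False}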
2037 @command('debugrevlog', cmdutil.debugrevlogopts +
2037 @command('debugrevlog', cmdutil.debugrevlogopts +
2038 [('d', 'dump', False, _('dump index data'))],
2038 [('d', 'dump', False, _('dump index data'))],
2039 _('-c|-m|FILE'),
2039 _('-c|-m|FILE'),
2040 optionalrepo=True)
2040 optionalrepo=True)
2041 def debugrevlog(ui, repo, file_=None, **opts):
2041 def debugrevlog(ui, repo, file_=None, **opts):
2042 """show data and statistics about a revlog"""
2042 """show data and statistics about a revlog"""
2043 opts = pycompat.byteskwargs(opts)
2043 opts = pycompat.byteskwargs(opts)
2044 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
2044 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
2045
2045
2046 if opts.get("dump"):
2046 if opts.get("dump"):
2047 numrevs = len(r)
2047 numrevs = len(r)
2048 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
2048 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
2049 " rawsize totalsize compression heads chainlen\n"))
2049 " rawsize totalsize compression heads chainlen\n"))
2050 ts = 0
2050 ts = 0
2051 heads = set()
2051 heads = set()
2052
2052
2053 for rev in pycompat.xrange(numrevs):
2053 for rev in pycompat.xrange(numrevs):
2054 dbase = r.deltaparent(rev)
2054 dbase = r.deltaparent(rev)
2055 if dbase == -1:
2055 if dbase == -1:
2056 dbase = rev
2056 dbase = rev
2057 cbase = r.chainbase(rev)
2057 cbase = r.chainbase(rev)
2058 clen = r.chainlen(rev)
2058 clen = r.chainlen(rev)
2059 p1, p2 = r.parentrevs(rev)
2059 p1, p2 = r.parentrevs(rev)
2060 rs = r.rawsize(rev)
2060 rs = r.rawsize(rev)
2061 ts = ts + rs
2061 ts = ts + rs
2062 heads -= set(r.parentrevs(rev))
2062 heads -= set(r.parentrevs(rev))
2063 heads.add(rev)
2063 heads.add(rev)
2064 try:
2064 try:
2065 compression = ts / r.end(rev)
2065 compression = ts / r.end(rev)
2066 except ZeroDivisionError:
2066 except ZeroDivisionError:
2067 compression = 0
2067 compression = 0
2068 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2068 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2069 "%11d %5d %8d\n" %
2069 "%11d %5d %8d\n" %
2070 (rev, p1, p2, r.start(rev), r.end(rev),
2070 (rev, p1, p2, r.start(rev), r.end(rev),
2071 r.start(dbase), r.start(cbase),
2071 r.start(dbase), r.start(cbase),
2072 r.start(p1), r.start(p2),
2072 r.start(p1), r.start(p2),
2073 rs, ts, compression, len(heads), clen))
2073 rs, ts, compression, len(heads), clen))
2074 return 0
2074 return 0
2075
2075
2076 v = r.version
2076 v = r.version
2077 format = v & 0xFFFF
2077 format = v & 0xFFFF
2078 flags = []
2078 flags = []
2079 gdelta = False
2079 gdelta = False
2080 if v & revlog.FLAG_INLINE_DATA:
2080 if v & revlog.FLAG_INLINE_DATA:
2081 flags.append('inline')
2081 flags.append('inline')
2082 if v & revlog.FLAG_GENERALDELTA:
2082 if v & revlog.FLAG_GENERALDELTA:
2083 gdelta = True
2083 gdelta = True
2084 flags.append('generaldelta')
2084 flags.append('generaldelta')
2085 if not flags:
2085 if not flags:
2086 flags = ['(none)']
2086 flags = ['(none)']
2087
2087
2088 ### tracks merge vs single parent
2088 ### tracks merge vs single parent
2089 nummerges = 0
2089 nummerges = 0
2090
2090
2091 ### tracks how the "delta" is built
2091 ### tracks how the "delta" is built
2092 # nodelta
2092 # nodelta
2093 numempty = 0
2093 numempty = 0
2094 numemptytext = 0
2094 numemptytext = 0
2095 numemptydelta = 0
2095 numemptydelta = 0
2096 # full file content
2096 # full file content
2097 numfull = 0
2097 numfull = 0
2098 # intermediate snapshot against a prior snapshot
2098 # intermediate snapshot against a prior snapshot
2099 numsemi = 0
2099 numsemi = 0
2100 # snapshot count per depth
2100 # snapshot count per depth
2101 numsnapdepth = collections.defaultdict(lambda: 0)
2101 numsnapdepth = collections.defaultdict(lambda: 0)
2102 # delta against previous revision
2102 # delta against previous revision
2103 numprev = 0
2103 numprev = 0
2104 # delta against first or second parent (not prev)
2104 # delta against first or second parent (not prev)
2105 nump1 = 0
2105 nump1 = 0
2106 nump2 = 0
2106 nump2 = 0
2107 # delta against neither prev nor parents
2107 # delta against neither prev nor parents
2108 numother = 0
2108 numother = 0
2109 # delta against prev that are also first or second parent
2109 # delta against prev that are also first or second parent
2110 # (details of `numprev`)
2110 # (details of `numprev`)
2111 nump1prev = 0
2111 nump1prev = 0
2112 nump2prev = 0
2112 nump2prev = 0
2113
2113
2114 # data about the delta chain of each rev
2114 # data about the delta chain of each rev
2115 chainlengths = []
2115 chainlengths = []
2116 chainbases = []
2116 chainbases = []
2117 chainspans = []
2117 chainspans = []
2118
2118
2119 # data about each revision
2119 # data about each revision
2120 datasize = [None, 0, 0]
2120 datasize = [None, 0, 0]
2121 fullsize = [None, 0, 0]
2121 fullsize = [None, 0, 0]
2122 semisize = [None, 0, 0]
2122 semisize = [None, 0, 0]
2123 # snapshot count per depth
2123 # snapshot count per depth
2124 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2124 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2125 deltasize = [None, 0, 0]
2125 deltasize = [None, 0, 0]
2126 chunktypecounts = {}
2126 chunktypecounts = {}
2127 chunktypesizes = {}
2127 chunktypesizes = {}
2128
2128
2129 def addsize(size, l):
2129 def addsize(size, l):
2130 if l[0] is None or size < l[0]:
2130 if l[0] is None or size < l[0]:
2131 l[0] = size
2131 l[0] = size
2132 if size > l[1]:
2132 if size > l[1]:
2133 l[1] = size
2133 l[1] = size
2134 l[2] += size
2134 l[2] += size
2135
2135
2136 numrevs = len(r)
2136 numrevs = len(r)
2137 for rev in pycompat.xrange(numrevs):
2137 for rev in pycompat.xrange(numrevs):
2138 p1, p2 = r.parentrevs(rev)
2138 p1, p2 = r.parentrevs(rev)
2139 delta = r.deltaparent(rev)
2139 delta = r.deltaparent(rev)
2140 if format > 0:
2140 if format > 0:
2141 addsize(r.rawsize(rev), datasize)
2141 addsize(r.rawsize(rev), datasize)
2142 if p2 != nullrev:
2142 if p2 != nullrev:
2143 nummerges += 1
2143 nummerges += 1
2144 size = r.length(rev)
2144 size = r.length(rev)
2145 if delta == nullrev:
2145 if delta == nullrev:
2146 chainlengths.append(0)
2146 chainlengths.append(0)
2147 chainbases.append(r.start(rev))
2147 chainbases.append(r.start(rev))
2148 chainspans.append(size)
2148 chainspans.append(size)
2149 if size == 0:
2149 if size == 0:
2150 numempty += 1
2150 numempty += 1
2151 numemptytext += 1
2151 numemptytext += 1
2152 else:
2152 else:
2153 numfull += 1
2153 numfull += 1
2154 numsnapdepth[0] += 1
2154 numsnapdepth[0] += 1
2155 addsize(size, fullsize)
2155 addsize(size, fullsize)
2156 addsize(size, snapsizedepth[0])
2156 addsize(size, snapsizedepth[0])
2157 else:
2157 else:
2158 chainlengths.append(chainlengths[delta] + 1)
2158 chainlengths.append(chainlengths[delta] + 1)
2159 baseaddr = chainbases[delta]
2159 baseaddr = chainbases[delta]
2160 revaddr = r.start(rev)
2160 revaddr = r.start(rev)
2161 chainbases.append(baseaddr)
2161 chainbases.append(baseaddr)
2162 chainspans.append((revaddr - baseaddr) + size)
2162 chainspans.append((revaddr - baseaddr) + size)
2163 if size == 0:
2163 if size == 0:
2164 numempty += 1
2164 numempty += 1
2165 numemptydelta += 1
2165 numemptydelta += 1
2166 elif r.issnapshot(rev):
2166 elif r.issnapshot(rev):
2167 addsize(size, semisize)
2167 addsize(size, semisize)
2168 numsemi += 1
2168 numsemi += 1
2169 depth = r.snapshotdepth(rev)
2169 depth = r.snapshotdepth(rev)
2170 numsnapdepth[depth] += 1
2170 numsnapdepth[depth] += 1
2171 addsize(size, snapsizedepth[depth])
2171 addsize(size, snapsizedepth[depth])
2172 else:
2172 else:
2173 addsize(size, deltasize)
2173 addsize(size, deltasize)
2174 if delta == rev - 1:
2174 if delta == rev - 1:
2175 numprev += 1
2175 numprev += 1
2176 if delta == p1:
2176 if delta == p1:
2177 nump1prev += 1
2177 nump1prev += 1
2178 elif delta == p2:
2178 elif delta == p2:
2179 nump2prev += 1
2179 nump2prev += 1
2180 elif delta == p1:
2180 elif delta == p1:
2181 nump1 += 1
2181 nump1 += 1
2182 elif delta == p2:
2182 elif delta == p2:
2183 nump2 += 1
2183 nump2 += 1
2184 elif delta != nullrev:
2184 elif delta != nullrev:
2185 numother += 1
2185 numother += 1
2186
2186
2187 # Obtain data on the raw chunks in the revlog.
2187 # Obtain data on the raw chunks in the revlog.
2188 if util.safehasattr(r, '_getsegmentforrevs'):
2188 if util.safehasattr(r, '_getsegmentforrevs'):
2189 segment = r._getsegmentforrevs(rev, rev)[1]
2189 segment = r._getsegmentforrevs(rev, rev)[1]
2190 else:
2190 else:
2191 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2191 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2192 if segment:
2192 if segment:
2193 chunktype = bytes(segment[0:1])
2193 chunktype = bytes(segment[0:1])
2194 else:
2194 else:
2195 chunktype = 'empty'
2195 chunktype = 'empty'
2196
2196
2197 if chunktype not in chunktypecounts:
2197 if chunktype not in chunktypecounts:
2198 chunktypecounts[chunktype] = 0
2198 chunktypecounts[chunktype] = 0
2199 chunktypesizes[chunktype] = 0
2199 chunktypesizes[chunktype] = 0
2200
2200
2201 chunktypecounts[chunktype] += 1
2201 chunktypecounts[chunktype] += 1
2202 chunktypesizes[chunktype] += size
2202 chunktypesizes[chunktype] += size
2203
2203
2204 # Adjust size min value for empty cases
2204 # Adjust size min value for empty cases
2205 for size in (datasize, fullsize, semisize, deltasize):
2205 for size in (datasize, fullsize, semisize, deltasize):
2206 if size[0] is None:
2206 if size[0] is None:
2207 size[0] = 0
2207 size[0] = 0
2208
2208
2209 numdeltas = numrevs - numfull - numempty - numsemi
2209 numdeltas = numrevs - numfull - numempty - numsemi
2210 numoprev = numprev - nump1prev - nump2prev
2210 numoprev = numprev - nump1prev - nump2prev
2211 totalrawsize = datasize[2]
2211 totalrawsize = datasize[2]
2212 datasize[2] /= numrevs
2212 datasize[2] /= numrevs
2213 fulltotal = fullsize[2]
2213 fulltotal = fullsize[2]
2214 fullsize[2] /= numfull
2214 fullsize[2] /= numfull
2215 semitotal = semisize[2]
2215 semitotal = semisize[2]
2216 snaptotal = {}
2216 snaptotal = {}
2217 if 0 < numsemi:
2217 if 0 < numsemi:
2218 semisize[2] /= numsemi
2218 semisize[2] /= numsemi
2219 for depth in snapsizedepth:
2219 for depth in snapsizedepth:
2220 snaptotal[depth] = snapsizedepth[depth][2]
2220 snaptotal[depth] = snapsizedepth[depth][2]
2221 snapsizedepth[depth][2] /= numsnapdepth[depth]
2221 snapsizedepth[depth][2] /= numsnapdepth[depth]
2222
2222
2223 deltatotal = deltasize[2]
2223 deltatotal = deltasize[2]
2224 if numdeltas > 0:
2224 if numdeltas > 0:
2225 deltasize[2] /= numdeltas
2225 deltasize[2] /= numdeltas
2226 totalsize = fulltotal + semitotal + deltatotal
2226 totalsize = fulltotal + semitotal + deltatotal
2227 avgchainlen = sum(chainlengths) / numrevs
2227 avgchainlen = sum(chainlengths) / numrevs
2228 maxchainlen = max(chainlengths)
2228 maxchainlen = max(chainlengths)
2229 maxchainspan = max(chainspans)
2229 maxchainspan = max(chainspans)
2230 compratio = 1
2230 compratio = 1
2231 if totalsize:
2231 if totalsize:
2232 compratio = totalrawsize / totalsize
2232 compratio = totalrawsize / totalsize
2233
2233
2234 basedfmtstr = '%%%dd\n'
2234 basedfmtstr = '%%%dd\n'
2235 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2235 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2236
2236
2237 def dfmtstr(max):
2237 def dfmtstr(max):
2238 return basedfmtstr % len(str(max))
2238 return basedfmtstr % len(str(max))
2239 def pcfmtstr(max, padding=0):
2239 def pcfmtstr(max, padding=0):
2240 return basepcfmtstr % (len(str(max)), ' ' * padding)
2240 return basepcfmtstr % (len(str(max)), ' ' * padding)
2241
2241
2242 def pcfmt(value, total):
2242 def pcfmt(value, total):
2243 if total:
2243 if total:
2244 return (value, 100 * float(value) / total)
2244 return (value, 100 * float(value) / total)
2245 else:
2245 else:
2246 return value, 100.0
2246 return value, 100.0
2247
2247
2248 ui.write(('format : %d\n') % format)
2248 ui.write(('format : %d\n') % format)
2249 ui.write(('flags : %s\n') % ', '.join(flags))
2249 ui.write(('flags : %s\n') % ', '.join(flags))
2250
2250
2251 ui.write('\n')
2251 ui.write('\n')
2252 fmt = pcfmtstr(totalsize)
2252 fmt = pcfmtstr(totalsize)
2253 fmt2 = dfmtstr(totalsize)
2253 fmt2 = dfmtstr(totalsize)
2254 ui.write(('revisions : ') + fmt2 % numrevs)
2254 ui.write(('revisions : ') + fmt2 % numrevs)
2255 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2255 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2256 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2256 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2257 ui.write(('revisions : ') + fmt2 % numrevs)
2257 ui.write(('revisions : ') + fmt2 % numrevs)
2258 ui.write((' empty : ') + fmt % pcfmt(numempty, numrevs))
2258 ui.write((' empty : ') + fmt % pcfmt(numempty, numrevs))
2259 ui.write((' text : ')
2259 ui.write((' text : ')
2260 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta))
2260 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta))
2261 ui.write((' delta : ')
2261 ui.write((' delta : ')
2262 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta))
2262 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta))
2263 ui.write((' snapshot : ') + fmt % pcfmt(numfull + numsemi, numrevs))
2263 ui.write((' snapshot : ') + fmt % pcfmt(numfull + numsemi, numrevs))
2264 for depth in sorted(numsnapdepth):
2264 for depth in sorted(numsnapdepth):
2265 ui.write((' lvl-%-3d : ' % depth)
2265 ui.write((' lvl-%-3d : ' % depth)
2266 + fmt % pcfmt(numsnapdepth[depth], numrevs))
2266 + fmt % pcfmt(numsnapdepth[depth], numrevs))
2267 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2267 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2268 ui.write(('revision size : ') + fmt2 % totalsize)
2268 ui.write(('revision size : ') + fmt2 % totalsize)
2269 ui.write((' snapshot : ')
2269 ui.write((' snapshot : ')
2270 + fmt % pcfmt(fulltotal + semitotal, totalsize))
2270 + fmt % pcfmt(fulltotal + semitotal, totalsize))
2271 for depth in sorted(numsnapdepth):
2271 for depth in sorted(numsnapdepth):
2272 ui.write((' lvl-%-3d : ' % depth)
2272 ui.write((' lvl-%-3d : ' % depth)
2273 + fmt % pcfmt(snaptotal[depth], totalsize))
2273 + fmt % pcfmt(snaptotal[depth], totalsize))
2274 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2274 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2275
2275
2276 def fmtchunktype(chunktype):
2276 def fmtchunktype(chunktype):
2277 if chunktype == 'empty':
2277 if chunktype == 'empty':
2278 return ' %s : ' % chunktype
2278 return ' %s : ' % chunktype
2279 elif chunktype in pycompat.bytestr(string.ascii_letters):
2279 elif chunktype in pycompat.bytestr(string.ascii_letters):
2280 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2280 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2281 else:
2281 else:
2282 return ' 0x%s : ' % hex(chunktype)
2282 return ' 0x%s : ' % hex(chunktype)
2283
2283
2284 ui.write('\n')
2284 ui.write('\n')
2285 ui.write(('chunks : ') + fmt2 % numrevs)
2285 ui.write(('chunks : ') + fmt2 % numrevs)
2286 for chunktype in sorted(chunktypecounts):
2286 for chunktype in sorted(chunktypecounts):
2287 ui.write(fmtchunktype(chunktype))
2287 ui.write(fmtchunktype(chunktype))
2288 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2288 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2289 ui.write(('chunks size : ') + fmt2 % totalsize)
2289 ui.write(('chunks size : ') + fmt2 % totalsize)
2290 for chunktype in sorted(chunktypecounts):
2290 for chunktype in sorted(chunktypecounts):
2291 ui.write(fmtchunktype(chunktype))
2291 ui.write(fmtchunktype(chunktype))
2292 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2292 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2293
2293
2294 ui.write('\n')
2294 ui.write('\n')
2295 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2295 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2296 ui.write(('avg chain length : ') + fmt % avgchainlen)
2296 ui.write(('avg chain length : ') + fmt % avgchainlen)
2297 ui.write(('max chain length : ') + fmt % maxchainlen)
2297 ui.write(('max chain length : ') + fmt % maxchainlen)
2298 ui.write(('max chain reach : ') + fmt % maxchainspan)
2298 ui.write(('max chain reach : ') + fmt % maxchainspan)
2299 ui.write(('compression ratio : ') + fmt % compratio)
2299 ui.write(('compression ratio : ') + fmt % compratio)
2300
2300
2301 if format > 0:
2301 if format > 0:
2302 ui.write('\n')
2302 ui.write('\n')
2303 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2303 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2304 % tuple(datasize))
2304 % tuple(datasize))
2305 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2305 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2306 % tuple(fullsize))
2306 % tuple(fullsize))
2307 ui.write(('inter-snapshot size (min/max/avg) : %d / %d / %d\n')
2307 ui.write(('inter-snapshot size (min/max/avg) : %d / %d / %d\n')
2308 % tuple(semisize))
2308 % tuple(semisize))
2309 for depth in sorted(snapsizedepth):
2309 for depth in sorted(snapsizedepth):
2310 if depth == 0:
2310 if depth == 0:
2311 continue
2311 continue
2312 ui.write((' level-%-3d (min/max/avg) : %d / %d / %d\n')
2312 ui.write((' level-%-3d (min/max/avg) : %d / %d / %d\n')
2313 % ((depth,) + tuple(snapsizedepth[depth])))
2313 % ((depth,) + tuple(snapsizedepth[depth])))
2314 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2314 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2315 % tuple(deltasize))
2315 % tuple(deltasize))
2316
2316
2317 if numdeltas > 0:
2317 if numdeltas > 0:
2318 ui.write('\n')
2318 ui.write('\n')
2319 fmt = pcfmtstr(numdeltas)
2319 fmt = pcfmtstr(numdeltas)
2320 fmt2 = pcfmtstr(numdeltas, 4)
2320 fmt2 = pcfmtstr(numdeltas, 4)
2321 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2321 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2322 if numprev > 0:
2322 if numprev > 0:
2323 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2323 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2324 numprev))
2324 numprev))
2325 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2325 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2326 numprev))
2326 numprev))
2327 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2327 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2328 numprev))
2328 numprev))
2329 if gdelta:
2329 if gdelta:
2330 ui.write(('deltas against p1 : ')
2330 ui.write(('deltas against p1 : ')
2331 + fmt % pcfmt(nump1, numdeltas))
2331 + fmt % pcfmt(nump1, numdeltas))
2332 ui.write(('deltas against p2 : ')
2332 ui.write(('deltas against p2 : ')
2333 + fmt % pcfmt(nump2, numdeltas))
2333 + fmt % pcfmt(nump2, numdeltas))
2334 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2334 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2335 numdeltas))
2335 numdeltas))
2336
2336
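Most of the statistics above come from two small building blocks: a [min, max, total] accumulator (addsize) and a value-plus-percentage formatter (pcfmt). They are easy to reproduce in isolation; the sketch below is illustrative only:

def summarize(sizes):
    # Stand-in for the [min, max, total] accumulators fed by addsize().
    acc = [None, 0, 0]
    for size in sizes:
        if acc[0] is None or size < acc[0]:
            acc[0] = size
        if size > acc[1]:
            acc[1] = size
        acc[2] += size
    return acc

def pcfmt(value, total):
    # Same shape as the helper above: the value and its share of the total.
    if total:
        return (value, 100 * float(value) / total)
    return (value, 100.0)

# summarize([3, 1, 2]) -> [1, 3, 6];  pcfmt(2, 8) -> (2, 25.0)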
2337 @command('debugrevspec',
2337 @command('debugrevspec',
2338 [('', 'optimize', None,
2338 [('', 'optimize', None,
2339 _('print parsed tree after optimizing (DEPRECATED)')),
2339 _('print parsed tree after optimizing (DEPRECATED)')),
2340 ('', 'show-revs', True, _('print list of result revisions (default)')),
2340 ('', 'show-revs', True, _('print list of result revisions (default)')),
2341 ('s', 'show-set', None, _('print internal representation of result set')),
2341 ('s', 'show-set', None, _('print internal representation of result set')),
2342 ('p', 'show-stage', [],
2342 ('p', 'show-stage', [],
2343 _('print parsed tree at the given stage'), _('NAME')),
2343 _('print parsed tree at the given stage'), _('NAME')),
2344 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2344 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2345 ('', 'verify-optimized', False, _('verify optimized result')),
2345 ('', 'verify-optimized', False, _('verify optimized result')),
2346 ],
2346 ],
2347 ('REVSPEC'))
2347 ('REVSPEC'))
2348 def debugrevspec(ui, repo, expr, **opts):
2348 def debugrevspec(ui, repo, expr, **opts):
2349 """parse and apply a revision specification
2349 """parse and apply a revision specification
2350
2350
2351 Use -p/--show-stage option to print the parsed tree at the given stages.
2351 Use -p/--show-stage option to print the parsed tree at the given stages.
2352 Use -p all to print tree at every stage.
2352 Use -p all to print tree at every stage.
2353
2353
2354 Use --no-show-revs option with -s or -p to print only the set
2354 Use --no-show-revs option with -s or -p to print only the set
2355 representation or the parsed tree respectively.
2355 representation or the parsed tree respectively.
2356
2356
2357 Use --verify-optimized to compare the optimized result with the unoptimized
2357 Use --verify-optimized to compare the optimized result with the unoptimized
2358 one. Returns 1 if the optimized result differs.
2358 one. Returns 1 if the optimized result differs.
2359 """
2359 """
2360 opts = pycompat.byteskwargs(opts)
2360 opts = pycompat.byteskwargs(opts)
2361 aliases = ui.configitems('revsetalias')
2361 aliases = ui.configitems('revsetalias')
2362 stages = [
2362 stages = [
2363 ('parsed', lambda tree: tree),
2363 ('parsed', lambda tree: tree),
2364 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2364 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2365 ui.warn)),
2365 ui.warn)),
2366 ('concatenated', revsetlang.foldconcat),
2366 ('concatenated', revsetlang.foldconcat),
2367 ('analyzed', revsetlang.analyze),
2367 ('analyzed', revsetlang.analyze),
2368 ('optimized', revsetlang.optimize),
2368 ('optimized', revsetlang.optimize),
2369 ]
2369 ]
2370 if opts['no_optimized']:
2370 if opts['no_optimized']:
2371 stages = stages[:-1]
2371 stages = stages[:-1]
2372 if opts['verify_optimized'] and opts['no_optimized']:
2372 if opts['verify_optimized'] and opts['no_optimized']:
2373 raise error.Abort(_('cannot use --verify-optimized with '
2373 raise error.Abort(_('cannot use --verify-optimized with '
2374 '--no-optimized'))
2374 '--no-optimized'))
2375 stagenames = set(n for n, f in stages)
2375 stagenames = set(n for n, f in stages)
2376
2376
2377 showalways = set()
2377 showalways = set()
2378 showchanged = set()
2378 showchanged = set()
2379 if ui.verbose and not opts['show_stage']:
2379 if ui.verbose and not opts['show_stage']:
2380 # show parsed tree by --verbose (deprecated)
2380 # show parsed tree by --verbose (deprecated)
2381 showalways.add('parsed')
2381 showalways.add('parsed')
2382 showchanged.update(['expanded', 'concatenated'])
2382 showchanged.update(['expanded', 'concatenated'])
2383 if opts['optimize']:
2383 if opts['optimize']:
2384 showalways.add('optimized')
2384 showalways.add('optimized')
2385 if opts['show_stage'] and opts['optimize']:
2385 if opts['show_stage'] and opts['optimize']:
2386 raise error.Abort(_('cannot use --optimize with --show-stage'))
2386 raise error.Abort(_('cannot use --optimize with --show-stage'))
2387 if opts['show_stage'] == ['all']:
2387 if opts['show_stage'] == ['all']:
2388 showalways.update(stagenames)
2388 showalways.update(stagenames)
2389 else:
2389 else:
2390 for n in opts['show_stage']:
2390 for n in opts['show_stage']:
2391 if n not in stagenames:
2391 if n not in stagenames:
2392 raise error.Abort(_('invalid stage name: %s') % n)
2392 raise error.Abort(_('invalid stage name: %s') % n)
2393 showalways.update(opts['show_stage'])
2393 showalways.update(opts['show_stage'])
2394
2394
2395 treebystage = {}
2395 treebystage = {}
2396 printedtree = None
2396 printedtree = None
2397 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
2397 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
2398 for n, f in stages:
2398 for n, f in stages:
2399 treebystage[n] = tree = f(tree)
2399 treebystage[n] = tree = f(tree)
2400 if n in showalways or (n in showchanged and tree != printedtree):
2400 if n in showalways or (n in showchanged and tree != printedtree):
2401 if opts['show_stage'] or n != 'parsed':
2401 if opts['show_stage'] or n != 'parsed':
2402 ui.write(("* %s:\n") % n)
2402 ui.write(("* %s:\n") % n)
2403 ui.write(revsetlang.prettyformat(tree), "\n")
2403 ui.write(revsetlang.prettyformat(tree), "\n")
2404 printedtree = tree
2404 printedtree = tree
2405
2405
2406 if opts['verify_optimized']:
2406 if opts['verify_optimized']:
2407 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2407 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2408 brevs = revset.makematcher(treebystage['optimized'])(repo)
2408 brevs = revset.makematcher(treebystage['optimized'])(repo)
2409 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2409 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2410 ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
2410 ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
2411 ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
2411 ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
2412 arevs = list(arevs)
2412 arevs = list(arevs)
2413 brevs = list(brevs)
2413 brevs = list(brevs)
2414 if arevs == brevs:
2414 if arevs == brevs:
2415 return 0
2415 return 0
2416 ui.write(('--- analyzed\n'), label='diff.file_a')
2416 ui.write(('--- analyzed\n'), label='diff.file_a')
2417 ui.write(('+++ optimized\n'), label='diff.file_b')
2417 ui.write(('+++ optimized\n'), label='diff.file_b')
2418 sm = difflib.SequenceMatcher(None, arevs, brevs)
2418 sm = difflib.SequenceMatcher(None, arevs, brevs)
2419 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2419 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2420 if tag in ('delete', 'replace'):
2420 if tag in ('delete', 'replace'):
2421 for c in arevs[alo:ahi]:
2421 for c in arevs[alo:ahi]:
2422 ui.write('-%s\n' % c, label='diff.deleted')
2422 ui.write('-%s\n' % c, label='diff.deleted')
2423 if tag in ('insert', 'replace'):
2423 if tag in ('insert', 'replace'):
2424 for c in brevs[blo:bhi]:
2424 for c in brevs[blo:bhi]:
2425 ui.write('+%s\n' % c, label='diff.inserted')
2425 ui.write('+%s\n' % c, label='diff.inserted')
2426 if tag == 'equal':
2426 if tag == 'equal':
2427 for c in arevs[alo:ahi]:
2427 for c in arevs[alo:ahi]:
2428 ui.write(' %s\n' % c)
2428 ui.write(' %s\n' % c)
2429 return 1
2429 return 1
2430
2430
2431 func = revset.makematcher(tree)
2431 func = revset.makematcher(tree)
2432 revs = func(repo)
2432 revs = func(repo)
2433 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2433 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2434 ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
2434 ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
2435 if not opts['show_revs']:
2435 if not opts['show_revs']:
2436 return
2436 return
2437 for c in revs:
2437 for c in revs:
2438 ui.write("%d\n" % c)
2438 ui.write("%d\n" % c)
2439
2439
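The --verify-optimized branch compares the analyzed and optimized results with difflib.SequenceMatcher. That comparison is easy to demonstrate on its own; the function name below is made up and the revision numbers are arbitrary:

import difflib

def diffrevs(arevs, brevs):
    # '-' marks revisions only in the first list, '+' only in the second,
    # mirroring the diff-style output printed by debugrevspec above.
    out = []
    sm = difflib.SequenceMatcher(None, arevs, brevs)
    for tag, alo, ahi, blo, bhi in sm.get_opcodes():
        if tag in ('delete', 'replace'):
            out.extend('-%d' % c for c in arevs[alo:ahi])
        if tag in ('insert', 'replace'):
            out.extend('+%d' % c for c in brevs[blo:bhi])
        if tag == 'equal':
            out.extend(' %d' % c for c in arevs[alo:ahi])
    return out

# diffrevs([0, 1, 2], [0, 2, 3]) -> [' 0', '-1', ' 2', '+3']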
2440 @command('debugserve', [
2440 @command('debugserve', [
2441 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2441 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2442 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2442 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2443 ('', 'logiofile', '', _('file to log server I/O to')),
2443 ('', 'logiofile', '', _('file to log server I/O to')),
2444 ], '')
2444 ], '')
2445 def debugserve(ui, repo, **opts):
2445 def debugserve(ui, repo, **opts):
2446 """run a server with advanced settings
2446 """run a server with advanced settings
2447
2447
2448 This command is similar to :hg:`serve`. It exists partially as a
2448 This command is similar to :hg:`serve`. It exists partially as a
2449 workaround for the fact that ``hg serve --stdio`` must have specific
2449 workaround for the fact that ``hg serve --stdio`` must have specific
2450 arguments for security reasons.
2450 arguments for security reasons.
2451 """
2451 """
2452 opts = pycompat.byteskwargs(opts)
2452 opts = pycompat.byteskwargs(opts)
2453
2453
2454 if not opts['sshstdio']:
2454 if not opts['sshstdio']:
2455 raise error.Abort(_('only --sshstdio is currently supported'))
2455 raise error.Abort(_('only --sshstdio is currently supported'))
2456
2456
2457 logfh = None
2457 logfh = None
2458
2458
2459 if opts['logiofd'] and opts['logiofile']:
2459 if opts['logiofd'] and opts['logiofile']:
2460 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2460 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2461
2461
2462 if opts['logiofd']:
2462 if opts['logiofd']:
2463 # Line buffered because output is line based.
2463 # Line buffered because output is line based.
2464 try:
2464 try:
2465 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2465 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2466 except OSError as e:
2466 except OSError as e:
2467 if e.errno != errno.ESPIPE:
2467 if e.errno != errno.ESPIPE:
2468 raise
2468 raise
2469 # can't seek a pipe, so `ab` mode fails on py3
2469 # can't seek a pipe, so `ab` mode fails on py3
2470 logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
2470 logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
2471 elif opts['logiofile']:
2471 elif opts['logiofile']:
2472 logfh = open(opts['logiofile'], 'ab', 1)
2472 logfh = open(opts['logiofile'], 'ab', 1)
2473
2473
2474 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2474 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2475 s.serve_forever()
2475 s.serve_forever()
2476
2476
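The --logiofd handling above has to cope with pipes, which cannot seek and therefore reject append mode on Python 3. The fallback is small enough to show on its own (helper name invented, standard library only):

import errno
import os

def openlogfd(fd):
    # Prefer line-buffered append mode; fall back to plain write mode when
    # the descriptor is a pipe and append mode raises ESPIPE.
    try:
        return os.fdopen(fd, 'ab', 1)
    except OSError as e:
        if e.errno != errno.ESPIPE:
            raise
        return os.fdopen(fd, 'wb', 1)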
2477 @command('debugsetparents', [], _('REV1 [REV2]'))
2477 @command('debugsetparents', [], _('REV1 [REV2]'))
2478 def debugsetparents(ui, repo, rev1, rev2=None):
2478 def debugsetparents(ui, repo, rev1, rev2=None):
2479 """manually set the parents of the current working directory
2479 """manually set the parents of the current working directory
2480
2480
2481 This is useful for writing repository conversion tools, but should
2481 This is useful for writing repository conversion tools, but should
2482 be used with care. For example, neither the working directory nor the
2482 be used with care. For example, neither the working directory nor the
2483 dirstate is updated, so file status may be incorrect after running this
2483 dirstate is updated, so file status may be incorrect after running this
2484 command.
2484 command.
2485
2485
2486 Returns 0 on success.
2486 Returns 0 on success.
2487 """
2487 """
2488
2488
2489 node1 = scmutil.revsingle(repo, rev1).node()
2489 node1 = scmutil.revsingle(repo, rev1).node()
2490 node2 = scmutil.revsingle(repo, rev2, 'null').node()
2490 node2 = scmutil.revsingle(repo, rev2, 'null').node()
2491
2491
2492 with repo.wlock():
2492 with repo.wlock():
2493 repo.setparents(node1, node2)
2493 repo.setparents(node1, node2)
2494
2494
2495 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2495 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2496 def debugssl(ui, repo, source=None, **opts):
2496 def debugssl(ui, repo, source=None, **opts):
2497 '''test a secure connection to a server
2497 '''test a secure connection to a server
2498
2498
2499 This builds the certificate chain for the server on Windows, installing the
2499 This builds the certificate chain for the server on Windows, installing the
2500 missing intermediates and trusted root via Windows Update if necessary. It
2500 missing intermediates and trusted root via Windows Update if necessary. It
2501 does nothing on other platforms.
2501 does nothing on other platforms.
2502
2502
2503 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2503 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2504 that server is used. See :hg:`help urls` for more information.
2504 that server is used. See :hg:`help urls` for more information.
2505
2505
2506 If the update succeeds, retry the original operation. Otherwise, the cause
2506 If the update succeeds, retry the original operation. Otherwise, the cause
2507 of the SSL error is likely another issue.
2507 of the SSL error is likely another issue.
2508 '''
2508 '''
2509 if not pycompat.iswindows:
2509 if not pycompat.iswindows:
2510 raise error.Abort(_('certificate chain building is only possible on '
2510 raise error.Abort(_('certificate chain building is only possible on '
2511 'Windows'))
2511 'Windows'))
2512
2512
2513 if not source:
2513 if not source:
2514 if not repo:
2514 if not repo:
2515 raise error.Abort(_("there is no Mercurial repository here, and no "
2515 raise error.Abort(_("there is no Mercurial repository here, and no "
2516 "server specified"))
2516 "server specified"))
2517 source = "default"
2517 source = "default"
2518
2518
2519 source, branches = hg.parseurl(ui.expandpath(source))
2519 source, branches = hg.parseurl(ui.expandpath(source))
2520 url = util.url(source)
2520 url = util.url(source)
2521 addr = None
2521 addr = None
2522
2522
2523 defaultport = {'https': 443, 'ssh': 22}
2523 defaultport = {'https': 443, 'ssh': 22}
2524 if url.scheme in defaultport:
2524 if url.scheme in defaultport:
2525 try:
2525 try:
2526 addr = (url.host, int(url.port or defaultport[url.scheme]))
2526 addr = (url.host, int(url.port or defaultport[url.scheme]))
2527 except ValueError:
2527 except ValueError:
2528 raise error.Abort(_("malformed port number in URL"))
2528 raise error.Abort(_("malformed port number in URL"))
2529 else:
2529 else:
2530 raise error.Abort(_("only https and ssh connections are supported"))
2530 raise error.Abort(_("only https and ssh connections are supported"))
2531
2531
2532 from . import win32
2532 from . import win32
2533
2533
2534 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2534 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2535 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2535 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2536
2536
2537 try:
2537 try:
2538 s.connect(addr)
2538 s.connect(addr)
2539 cert = s.getpeercert(True)
2539 cert = s.getpeercert(True)
2540
2540
2541 ui.status(_('checking the certificate chain for %s\n') % url.host)
2541 ui.status(_('checking the certificate chain for %s\n') % url.host)
2542
2542
2543 complete = win32.checkcertificatechain(cert, build=False)
2543 complete = win32.checkcertificatechain(cert, build=False)
2544
2544
2545 if not complete:
2545 if not complete:
2546 ui.status(_('certificate chain is incomplete, updating... '))
2546 ui.status(_('certificate chain is incomplete, updating... '))
2547
2547
2548 if not win32.checkcertificatechain(cert):
2548 if not win32.checkcertificatechain(cert):
2549 ui.status(_('failed.\n'))
2549 ui.status(_('failed.\n'))
2550 else:
2550 else:
2551 ui.status(_('done.\n'))
2551 ui.status(_('done.\n'))
2552 else:
2552 else:
2553 ui.status(_('full certificate chain is available\n'))
2553 ui.status(_('full certificate chain is available\n'))
2554 finally:
2554 finally:
2555 s.close()
2555 s.close()
2556
2556
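The address computation above only fills in default ports for the two supported schemes. A tiny stand-alone sketch of that rule (function name made up):

def resolveaddr(scheme, host, port=None):
    # https and ssh get default ports; anything else is rejected, as in
    # debugssl above.
    defaultport = {'https': 443, 'ssh': 22}
    if scheme not in defaultport:
        raise ValueError('only https and ssh connections are supported')
    return (host, int(port or defaultport[scheme]))

# resolveaddr('https', 'example.com') -> ('example.com', 443)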
2557 @command('debugsub',
2557 @command('debugsub',
2558 [('r', 'rev', '',
2558 [('r', 'rev', '',
2559 _('revision to check'), _('REV'))],
2559 _('revision to check'), _('REV'))],
2560 _('[-r REV] [REV]'))
2560 _('[-r REV] [REV]'))
2561 def debugsub(ui, repo, rev=None):
2561 def debugsub(ui, repo, rev=None):
2562 ctx = scmutil.revsingle(repo, rev, None)
2562 ctx = scmutil.revsingle(repo, rev, None)
2563 for k, v in sorted(ctx.substate.items()):
2563 for k, v in sorted(ctx.substate.items()):
2564 ui.write(('path %s\n') % k)
2564 ui.write(('path %s\n') % k)
2565 ui.write((' source %s\n') % v[0])
2565 ui.write((' source %s\n') % v[0])
2566 ui.write((' revision %s\n') % v[1])
2566 ui.write((' revision %s\n') % v[1])
2567
2567
2568 @command('debugsuccessorssets',
2568 @command('debugsuccessorssets',
2569 [('', 'closest', False, _('return closest successors sets only'))],
2569 [('', 'closest', False, _('return closest successors sets only'))],
2570 _('[REV]'))
2570 _('[REV]'))
2571 def debugsuccessorssets(ui, repo, *revs, **opts):
2571 def debugsuccessorssets(ui, repo, *revs, **opts):
2572 """show set of successors for revision
2572 """show set of successors for revision
2573
2573
2574 A successors set of changeset A is a consistent group of revisions that
2574 A successors set of changeset A is a consistent group of revisions that
2575 succeed A. It contains non-obsolete changesets only unless the
2575 succeed A. It contains non-obsolete changesets only unless the
2576 --closest option is set.
2576 --closest option is set.
2577
2577
2578 In most cases a changeset A has a single successors set containing a single
2578 In most cases a changeset A has a single successors set containing a single
2579 successor (changeset A replaced by A').
2579 successor (changeset A replaced by A').
2580
2580
2581 A changeset that is made obsolete with no successors is called "pruned".
2581 A changeset that is made obsolete with no successors is called "pruned".
2582 Such changesets have no successors sets at all.
2582 Such changesets have no successors sets at all.
2583
2583
2584 A changeset that has been "split" will have a successors set containing
2584 A changeset that has been "split" will have a successors set containing
2585 more than one successor.
2585 more than one successor.
2586
2586
2587 A changeset that has been rewritten in multiple different ways is called
2587 A changeset that has been rewritten in multiple different ways is called
2588 "divergent". Such changesets have multiple successor sets (each of which
2588 "divergent". Such changesets have multiple successor sets (each of which
2589 may also be split, i.e. have multiple successors).
2589 may also be split, i.e. have multiple successors).
2590
2590
2591 Results are displayed as follows::
2591 Results are displayed as follows::
2592
2592
2593 <rev1>
2593 <rev1>
2594 <successors-1A>
2594 <successors-1A>
2595 <rev2>
2595 <rev2>
2596 <successors-2A>
2596 <successors-2A>
2597 <successors-2B1> <successors-2B2> <successors-2B3>
2597 <successors-2B1> <successors-2B2> <successors-2B3>
2598
2598
2599 Here rev2 has two possible (i.e. divergent) successors sets. The first
2599 Here rev2 has two possible (i.e. divergent) successors sets. The first
2600 holds one element, whereas the second holds three (i.e. the changeset has
2600 holds one element, whereas the second holds three (i.e. the changeset has
2601 been split).
2601 been split).
2602 """
2602 """
2603 # passed to successorssets caching computation from one call to another
2603 # passed to successorssets caching computation from one call to another
2604 cache = {}
2604 cache = {}
2605 ctx2str = bytes
2605 ctx2str = bytes
2606 node2str = short
2606 node2str = short
2607 for rev in scmutil.revrange(repo, revs):
2607 for rev in scmutil.revrange(repo, revs):
2608 ctx = repo[rev]
2608 ctx = repo[rev]
2609 ui.write('%s\n'% ctx2str(ctx))
2609 ui.write('%s\n'% ctx2str(ctx))
2610 for succsset in obsutil.successorssets(repo, ctx.node(),
2610 for succsset in obsutil.successorssets(repo, ctx.node(),
2611 closest=opts[r'closest'],
2611 closest=opts[r'closest'],
2612 cache=cache):
2612 cache=cache):
2613 if succsset:
2613 if succsset:
2614 ui.write(' ')
2614 ui.write(' ')
2615 ui.write(node2str(succsset[0]))
2615 ui.write(node2str(succsset[0]))
2616 for node in succsset[1:]:
2616 for node in succsset[1:]:
2617 ui.write(' ')
2617 ui.write(' ')
2618 ui.write(node2str(node))
2618 ui.write(node2str(node))
2619 ui.write('\n')
2619 ui.write('\n')
2620
2620
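# Hedged usage sketch (the revsets are illustrative): each requested revision
# is printed on its own line, followed by one indented line per successors
# set, in the layout documented above:
#   $ hg debugsuccessorssets 'all()'
#   $ hg debugsuccessorssets --closest 'obsolete()'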
2621 @command('debugtemplate',
2621 @command('debugtemplate',
2622 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2622 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2623 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2623 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2624 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2624 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2625 optionalrepo=True)
2625 optionalrepo=True)
2626 def debugtemplate(ui, repo, tmpl, **opts):
2626 def debugtemplate(ui, repo, tmpl, **opts):
2627 """parse and apply a template
2627 """parse and apply a template
2628
2628
2629 If -r/--rev is given, the template is processed as a log template and
2629 If -r/--rev is given, the template is processed as a log template and
2630 applied to the given changesets. Otherwise, it is processed as a generic
2630 applied to the given changesets. Otherwise, it is processed as a generic
2631 template.
2631 template.
2632
2632
2633 Use --verbose to print the parsed tree.
2633 Use --verbose to print the parsed tree.
2634 """
2634 """
2635 revs = None
2635 revs = None
2636 if opts[r'rev']:
2636 if opts[r'rev']:
2637 if repo is None:
2637 if repo is None:
2638 raise error.RepoError(_('there is no Mercurial repository here '
2638 raise error.RepoError(_('there is no Mercurial repository here '
2639 '(.hg not found)'))
2639 '(.hg not found)'))
2640 revs = scmutil.revrange(repo, opts[r'rev'])
2640 revs = scmutil.revrange(repo, opts[r'rev'])
2641
2641
2642 props = {}
2642 props = {}
2643 for d in opts[r'define']:
2643 for d in opts[r'define']:
2644 try:
2644 try:
2645 k, v = (e.strip() for e in d.split('=', 1))
2645 k, v = (e.strip() for e in d.split('=', 1))
2646 if not k or k == 'ui':
2646 if not k or k == 'ui':
2647 raise ValueError
2647 raise ValueError
2648 props[k] = v
2648 props[k] = v
2649 except ValueError:
2649 except ValueError:
2650 raise error.Abort(_('malformed keyword definition: %s') % d)
2650 raise error.Abort(_('malformed keyword definition: %s') % d)
2651
2651
2652 if ui.verbose:
2652 if ui.verbose:
2653 aliases = ui.configitems('templatealias')
2653 aliases = ui.configitems('templatealias')
2654 tree = templater.parse(tmpl)
2654 tree = templater.parse(tmpl)
2655 ui.note(templater.prettyformat(tree), '\n')
2655 ui.note(templater.prettyformat(tree), '\n')
2656 newtree = templater.expandaliases(tree, aliases)
2656 newtree = templater.expandaliases(tree, aliases)
2657 if newtree != tree:
2657 if newtree != tree:
2658 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2658 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2659
2659
2660 if revs is None:
2660 if revs is None:
2661 tres = formatter.templateresources(ui, repo)
2661 tres = formatter.templateresources(ui, repo)
2662 t = formatter.maketemplater(ui, tmpl, resources=tres)
2662 t = formatter.maketemplater(ui, tmpl, resources=tres)
2663 if ui.verbose:
2663 if ui.verbose:
2664 kwds, funcs = t.symbolsuseddefault()
2664 kwds, funcs = t.symbolsuseddefault()
2665 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2665 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2666 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2666 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2667 ui.write(t.renderdefault(props))
2667 ui.write(t.renderdefault(props))
2668 else:
2668 else:
2669 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2669 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2670 if ui.verbose:
2670 if ui.verbose:
2671 kwds, funcs = displayer.t.symbolsuseddefault()
2671 kwds, funcs = displayer.t.symbolsuseddefault()
2672 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2672 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2673 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2673 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2674 for r in revs:
2674 for r in revs:
2675 displayer.show(repo[r], **pycompat.strkwargs(props))
2675 displayer.show(repo[r], **pycompat.strkwargs(props))
2676 displayer.close()
2676 displayer.close()
2677
2677
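# Hedged usage sketches (the template text and the -D keyword are
# illustrative; with -r the template is applied to each changeset, and the
# -D definitions become extra template keywords):
#   $ hg debugtemplate -r . '{node|short} {desc|firstline}\n'
#   $ hg debugtemplate -r . -D greeting=hi '{greeting} {node|short}\n'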
2678 @command('debuguigetpass', [
2678 @command('debuguigetpass', [
2679 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2679 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2680 ], _('[-p TEXT]'), norepo=True)
2680 ], _('[-p TEXT]'), norepo=True)
2681 def debuguigetpass(ui, prompt=''):
2681 def debuguigetpass(ui, prompt=''):
2682 """show prompt to type password"""
2682 """show prompt to type password"""
2683 r = ui.getpass(prompt)
2683 r = ui.getpass(prompt)
2684 ui.write(('response: %s\n') % r)
2684 ui.write(('response: %s\n') % r)
2685
2685
2686 @command('debuguiprompt', [
2686 @command('debuguiprompt', [
2687 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2687 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2688 ], _('[-p TEXT]'), norepo=True)
2688 ], _('[-p TEXT]'), norepo=True)
2689 def debuguiprompt(ui, prompt=''):
2689 def debuguiprompt(ui, prompt=''):
2690 """show plain prompt"""
2690 """show plain prompt"""
2691 r = ui.prompt(prompt)
2691 r = ui.prompt(prompt)
2692 ui.write(('response: %s\n') % r)
2692 ui.write(('response: %s\n') % r)
2693
2693
2694 @command('debugupdatecaches', [])
2694 @command('debugupdatecaches', [])
2695 def debugupdatecaches(ui, repo, *pats, **opts):
2695 def debugupdatecaches(ui, repo, *pats, **opts):
2696 """warm all known caches in the repository"""
2696 """warm all known caches in the repository"""
2697 with repo.wlock(), repo.lock():
2697 with repo.wlock(), repo.lock():
2698 repo.updatecaches(full=True)
2698 repo.updatecaches(full=True)
2699
2699
2700 @command('debugupgraderepo', [
2700 @command('debugupgraderepo', [
2701 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2701 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2702 ('', 'run', False, _('performs an upgrade')),
2702 ('', 'run', False, _('performs an upgrade')),
2703 ])
2703 ])
2704 def debugupgraderepo(ui, repo, run=False, optimize=None):
2704 def debugupgraderepo(ui, repo, run=False, optimize=None):
2705 """upgrade a repository to use different features
2705 """upgrade a repository to use different features
2706
2706
2707 If no arguments are specified, the repository is evaluated for upgrade
2707 If no arguments are specified, the repository is evaluated for upgrade
2708 and a list of problems and potential optimizations is printed.
2708 and a list of problems and potential optimizations is printed.
2709
2709
2710 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2710 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2711 can be influenced via additional arguments. More details will be provided
2711 can be influenced via additional arguments. More details will be provided
2712 by the command output when run without ``--run``.
2712 by the command output when run without ``--run``.
2713
2713
2714 During the upgrade, the repository will be locked and no writes will be
2714 During the upgrade, the repository will be locked and no writes will be
2715 allowed.
2715 allowed.
2716
2716
2717 At the end of the upgrade, the repository may not be readable while new
2717 At the end of the upgrade, the repository may not be readable while new
2718 repository data is swapped in. This window will be as long as it takes to
2718 repository data is swapped in. This window will be as long as it takes to
2719 rename some directories inside the ``.hg`` directory. On most machines, this
2719 rename some directories inside the ``.hg`` directory. On most machines, this
2720 should complete almost instantaneously and the chances of a consumer being
2720 should complete almost instantaneously and the chances of a consumer being
2721 unable to access the repository should be low.
2721 unable to access the repository should be low.
2722 """
2722 """
2723 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2723 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2724
2724
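# Hedged usage sketch: run without arguments to get the report described
# above, then add --run (and optionally -o NAME) to actually perform the
# upgrade:
#   $ hg debugupgraderepo
#   $ hg debugupgraderepo --run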
2725 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2725 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2726 inferrepo=True)
2726 inferrepo=True)
2727 def debugwalk(ui, repo, *pats, **opts):
2727 def debugwalk(ui, repo, *pats, **opts):
2728 """show how files match on given patterns"""
2728 """show how files match on given patterns"""
2729 opts = pycompat.byteskwargs(opts)
2729 opts = pycompat.byteskwargs(opts)
2730 m = scmutil.match(repo[None], pats, opts)
2730 m = scmutil.match(repo[None], pats, opts)
2731 if ui.verbose:
2731 if ui.verbose:
2732 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
2732 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
2733 items = list(repo[None].walk(m))
2733 items = list(repo[None].walk(m))
2734 if not items:
2734 if not items:
2735 return
2735 return
2736 f = lambda fn: fn
2736 f = lambda fn: fn
2737 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2737 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2738 f = lambda fn: util.normpath(fn)
2738 f = lambda fn: util.normpath(fn)
2739 fmt = 'f %%-%ds %%-%ds %%s' % (
2739 fmt = 'f %%-%ds %%-%ds %%s' % (
2740 max([len(abs) for abs in items]),
2740 max([len(abs) for abs in items]),
2741 max([len(m.rel(abs)) for abs in items]))
2741 max([len(m.rel(abs)) for abs in items]))
2742 for abs in items:
2742 for abs in items:
2743 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2743 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2744 ui.write("%s\n" % line.rstrip())
2744 ui.write("%s\n" % line.rstrip())
2745
2745
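# Hedged usage sketch (the patterns are illustrative; -I/-X are the standard
# include/exclude walk options, and --verbose also prints the matcher):
#   $ hg debugwalk -I 'glob:**.py' -X 'glob:tests/**'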
2746 @command('debugwhyunstable', [], _('REV'))
2746 @command('debugwhyunstable', [], _('REV'))
2747 def debugwhyunstable(ui, repo, rev):
2747 def debugwhyunstable(ui, repo, rev):
2748 """explain instabilities of a changeset"""
2748 """explain instabilities of a changeset"""
2749 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
2749 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
2750 dnodes = ''
2750 dnodes = ''
2751 if entry.get('divergentnodes'):
2751 if entry.get('divergentnodes'):
2752 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2752 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2753 for ctx in entry['divergentnodes']) + ' '
2753 for ctx in entry['divergentnodes']) + ' '
2754 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2754 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2755 entry['reason'], entry['node']))
2755 entry['reason'], entry['node']))
2756
2756
2757 @command('debugwireargs',
2757 @command('debugwireargs',
2758 [('', 'three', '', 'three'),
2758 [('', 'three', '', 'three'),
2759 ('', 'four', '', 'four'),
2759 ('', 'four', '', 'four'),
2760 ('', 'five', '', 'five'),
2760 ('', 'five', '', 'five'),
2761 ] + cmdutil.remoteopts,
2761 ] + cmdutil.remoteopts,
2762 _('REPO [OPTIONS]... [ONE [TWO]]'),
2762 _('REPO [OPTIONS]... [ONE [TWO]]'),
2763 norepo=True)
2763 norepo=True)
2764 def debugwireargs(ui, repopath, *vals, **opts):
2764 def debugwireargs(ui, repopath, *vals, **opts):
2765 opts = pycompat.byteskwargs(opts)
2765 opts = pycompat.byteskwargs(opts)
2766 repo = hg.peer(ui, opts, repopath)
2766 repo = hg.peer(ui, opts, repopath)
2767 for opt in cmdutil.remoteopts:
2767 for opt in cmdutil.remoteopts:
2768 del opts[opt[1]]
2768 del opts[opt[1]]
2769 args = {}
2769 args = {}
2770 for k, v in opts.iteritems():
2770 for k, v in opts.iteritems():
2771 if v:
2771 if v:
2772 args[k] = v
2772 args[k] = v
2773 args = pycompat.strkwargs(args)
2773 args = pycompat.strkwargs(args)
2774 # run twice to check that we don't mess up the stream for the next command
2774 # run twice to check that we don't mess up the stream for the next command
2775 res1 = repo.debugwireargs(*vals, **args)
2775 res1 = repo.debugwireargs(*vals, **args)
2776 res2 = repo.debugwireargs(*vals, **args)
2776 res2 = repo.debugwireargs(*vals, **args)
2777 ui.write("%s\n" % res1)
2777 ui.write("%s\n" % res1)
2778 if res1 != res2:
2778 if res1 != res2:
2779 ui.warn("%s\n" % res2)
2779 ui.warn("%s\n" % res2)
2780
2780
2781 def _parsewirelangblocks(fh):
2781 def _parsewirelangblocks(fh):
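# Parses the stdin mini-language used by debugwireproto below: yields
# (action, blocklines) pairs, where ``action`` is an unindented line and
# ``blocklines`` are the indented lines that follow it. Blank lines and
# lines starting with ``#`` are skipped.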
2782 activeaction = None
2782 activeaction = None
2783 blocklines = []
2783 blocklines = []
2784
2784
2785 for line in fh:
2785 for line in fh:
2786 line = line.rstrip()
2786 line = line.rstrip()
2787 if not line:
2787 if not line:
2788 continue
2788 continue
2789
2789
2790 if line.startswith(b'#'):
2790 if line.startswith(b'#'):
2791 continue
2791 continue
2792
2792
2793 if not line.startswith(b' '):
2793 if not line.startswith(b' '):
2794 # New block. Flush previous one.
2794 # New block. Flush previous one.
2795 if activeaction:
2795 if activeaction:
2796 yield activeaction, blocklines
2796 yield activeaction, blocklines
2797
2797
2798 activeaction = line
2798 activeaction = line
2799 blocklines = []
2799 blocklines = []
2800 continue
2800 continue
2801
2801
2802 # Else we start with an indent.
2802 # Else we start with an indent.
2803
2803
2804 if not activeaction:
2804 if not activeaction:
2805 raise error.Abort(_('indented line outside of block'))
2805 raise error.Abort(_('indented line outside of block'))
2806
2806
2807 blocklines.append(line)
2807 blocklines.append(line)
2808
2808
2809 # Flush last block.
2809 # Flush last block.
2810 if activeaction:
2810 if activeaction:
2811 yield activeaction, blocklines
2811 yield activeaction, blocklines
2812
2812
2813 @command('debugwireproto',
2813 @command('debugwireproto',
2814 [
2814 [
2815 ('', 'localssh', False, _('start an SSH server for this repo')),
2815 ('', 'localssh', False, _('start an SSH server for this repo')),
2816 ('', 'peer', '', _('construct a specific version of the peer')),
2816 ('', 'peer', '', _('construct a specific version of the peer')),
2817 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2817 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2818 ('', 'nologhandshake', False,
2818 ('', 'nologhandshake', False,
2819 _('do not log I/O related to the peer handshake')),
2819 _('do not log I/O related to the peer handshake')),
2820 ] + cmdutil.remoteopts,
2820 ] + cmdutil.remoteopts,
2821 _('[PATH]'),
2821 _('[PATH]'),
2822 optionalrepo=True)
2822 optionalrepo=True)
2823 def debugwireproto(ui, repo, path=None, **opts):
2823 def debugwireproto(ui, repo, path=None, **opts):
2824 """send wire protocol commands to a server
2824 """send wire protocol commands to a server
2825
2825
2826 This command can be used to issue wire protocol commands to remote
2826 This command can be used to issue wire protocol commands to remote
2827 peers and to debug the raw data being exchanged.
2827 peers and to debug the raw data being exchanged.
2828
2828
2829 ``--localssh`` will start an SSH server against the current repository
2829 ``--localssh`` will start an SSH server against the current repository
2830 and connect to that. By default, the connection will perform a handshake
2830 and connect to that. By default, the connection will perform a handshake
2831 and establish an appropriate peer instance.
2831 and establish an appropriate peer instance.
2832
2832
2833 ``--peer`` can be used to bypass the handshake protocol and construct a
2833 ``--peer`` can be used to bypass the handshake protocol and construct a
2834 peer instance using the specified class type. Valid values are ``raw``,
2834 peer instance using the specified class type. Valid values are ``raw``,
2835 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2835 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2836 raw data payloads and don't support higher-level command actions.
2836 raw data payloads and don't support higher-level command actions.
2837
2837
2838 ``--noreadstderr`` can be used to disable automatic reading from stderr
2838 ``--noreadstderr`` can be used to disable automatic reading from stderr
2839 of the peer (for SSH connections only). Disabling automatic reading of
2839 of the peer (for SSH connections only). Disabling automatic reading of
2840 stderr is useful for making output more deterministic.
2840 stderr is useful for making output more deterministic.
2841
2841
2842 Commands are issued via a mini language which is specified via stdin.
2842 Commands are issued via a mini language which is specified via stdin.
2843 The language consists of individual actions to perform. An action is
2843 The language consists of individual actions to perform. An action is
2844 defined by a block. A block is defined as a line with no leading
2844 defined by a block. A block is defined as a line with no leading
2845 space followed by 0 or more lines with leading space. Blocks are
2845 space followed by 0 or more lines with leading space. Blocks are
2846 effectively a high-level command with additional metadata.
2846 effectively a high-level command with additional metadata.
2847
2847
2848 Lines beginning with ``#`` are ignored.
2848 Lines beginning with ``#`` are ignored.
2849
2849
2850 The following sections denote available actions.
2850 The following sections denote available actions.
2851
2851
2852 raw
2852 raw
2853 ---
2853 ---
2854
2854
2855 Send raw data to the server.
2855 Send raw data to the server.
2856
2856
2857 The block payload contains the raw data to send as one atomic send
2857 The block payload contains the raw data to send as one atomic send
2858 operation. The data may not actually be delivered in a single system
2858 operation. The data may not actually be delivered in a single system
2859 call: it depends on the abilities of the transport being used.
2859 call: it depends on the abilities of the transport being used.
2860
2860
2861 Each line in the block is de-indented and concatenated. Then, that
2861 Each line in the block is de-indented and concatenated. Then, that
2862 value is evaluated as a Python b'' literal. This allows the use of
2862 value is evaluated as a Python b'' literal. This allows the use of
2863 backslash escaping, etc.
2863 backslash escaping, etc.
2864
2864
2865 raw+
2865 raw+
2866 ----
2866 ----
2867
2867
2868 Behaves like ``raw`` except flushes output afterwards.
2868 Behaves like ``raw`` except flushes output afterwards.
2869
2869
2870 command <X>
2870 command <X>
2871 -----------
2871 -----------
2872
2872
2873 Send a request to run a named command, whose name follows the ``command``
2873 Send a request to run a named command, whose name follows the ``command``
2874 string.
2874 string.
2875
2875
2876 Arguments to the command are defined as lines in this block. The format of
2876 Arguments to the command are defined as lines in this block. The format of
2877 each line is ``<key> <value>``. e.g.::
2877 each line is ``<key> <value>``. e.g.::
2878
2878
2879 command listkeys
2879 command listkeys
2880 namespace bookmarks
2880 namespace bookmarks
2881
2881
2882 If the value begins with ``eval:``, it will be interpreted as a Python
2882 If the value begins with ``eval:``, it will be interpreted as a Python
2883 literal expression. Otherwise values are interpreted as Python b'' literals.
2883 literal expression. Otherwise values are interpreted as Python b'' literals.
2884 This allows sending complex types and encoding special byte sequences via
2884 This allows sending complex types and encoding special byte sequences via
2885 backslash escaping.
2885 backslash escaping.
2886
2886
2887 The following arguments have special meaning:
2887 The following arguments have special meaning:
2888
2888
2889 ``PUSHFILE``
2889 ``PUSHFILE``
2890 When defined, the *push* mechanism of the peer will be used instead
2890 When defined, the *push* mechanism of the peer will be used instead
2891 of the static request-response mechanism and the content of the
2891 of the static request-response mechanism and the content of the
2892 file specified in the value of this argument will be sent as the
2892 file specified in the value of this argument will be sent as the
2893 command payload.
2893 command payload.
2894
2894
2895 This can be used to submit a local bundle file to the remote.
2895 This can be used to submit a local bundle file to the remote.
2896
2896
2897 batchbegin
2897 batchbegin
2898 ----------
2898 ----------
2899
2899
2900 Instruct the peer to begin a batched send.
2900 Instruct the peer to begin a batched send.
2901
2901
2902 All ``command`` blocks are queued for execution until the next
2902 All ``command`` blocks are queued for execution until the next
2903 ``batchsubmit`` block.
2903 ``batchsubmit`` block.
2904
2904
2905 batchsubmit
2905 batchsubmit
2906 -----------
2906 -----------
2907
2907
2908 Submit previously queued ``command`` blocks as a batch request.
2908 Submit previously queued ``command`` blocks as a batch request.
2909
2909
2910 This action MUST be paired with a ``batchbegin`` action.
2910 This action MUST be paired with a ``batchbegin`` action.
2911
2911
2912 httprequest <method> <path>
2912 httprequest <method> <path>
2913 ---------------------------
2913 ---------------------------
2914
2914
2915 (HTTP peer only)
2915 (HTTP peer only)
2916
2916
2917 Send an HTTP request to the peer.
2917 Send an HTTP request to the peer.
2918
2918
2919 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
2919 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
2920
2920
2921 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
2921 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
2922 headers to add to the request. e.g. ``Accept: foo``.
2922 headers to add to the request. e.g. ``Accept: foo``.
2923
2923
2924 The following arguments are special:
2924 The following arguments are special:
2925
2925
2926 ``BODYFILE``
2926 ``BODYFILE``
2927 The content of the file defined as the value to this argument will be
2927 The content of the file defined as the value to this argument will be
2928 transferred verbatim as the HTTP request body.
2928 transferred verbatim as the HTTP request body.
2929
2929
2930 ``frame <type> <flags> <payload>``
2930 ``frame <type> <flags> <payload>``
2931 Send a unified protocol frame as part of the request body.
2931 Send a unified protocol frame as part of the request body.
2932
2932
2933 All frames will be collected and sent as the body to the HTTP
2933 All frames will be collected and sent as the body to the HTTP
2934 request.
2934 request.
2935
2935
2936 close
2936 close
2937 -----
2937 -----
2938
2938
2939 Close the connection to the server.
2939 Close the connection to the server.
2940
2940
2941 flush
2941 flush
2942 -----
2942 -----
2943
2943
2944 Flush data written to the server.
2944 Flush data written to the server.
2945
2945
2946 readavailable
2946 readavailable
2947 -------------
2947 -------------
2948
2948
2949 Close the write end of the connection and read all available data from
2949 Close the write end of the connection and read all available data from
2950 the server.
2950 the server.
2951
2951
2952 If the connection to the server encompasses multiple pipes, we poll both
2952 If the connection to the server encompasses multiple pipes, we poll both
2953 pipes and read available data.
2953 pipes and read available data.
2954
2954
2955 readline
2955 readline
2956 --------
2956 --------
2957
2957
2958 Read a line of output from the server. If there are multiple output
2958 Read a line of output from the server. If there are multiple output
2959 pipes, reads only the main pipe.
2959 pipes, reads only the main pipe.
2960
2960
2961 ereadline
2961 ereadline
2962 ---------
2962 ---------
2963
2963
2964 Like ``readline``, but read from the stderr pipe, if available.
2964 Like ``readline``, but read from the stderr pipe, if available.
2965
2965
2966 read <X>
2966 read <X>
2967 --------
2967 --------
2968
2968
2969 ``read()`` N bytes from the server's main output pipe.
2969 ``read()`` N bytes from the server's main output pipe.
2970
2970
2971 eread <X>
2971 eread <X>
2972 ---------
2972 ---------
2973
2973
2974 ``read()`` N bytes from the server's stderr pipe, if available.
2974 ``read()`` N bytes from the server's stderr pipe, if available.
2975
2975
2976 Specifying Unified Frame-Based Protocol Frames
2976 Specifying Unified Frame-Based Protocol Frames
2977 ----------------------------------------------
2977 ----------------------------------------------
2978
2978
2979 It is possible to emit a *Unified Frame-Based Protocol* by using special
2979 It is possible to emit a *Unified Frame-Based Protocol* by using special
2980 syntax.
2980 syntax.
2981
2981
2982 A frame is composed as a type, flags, and payload. These can be parsed
2982 A frame is composed as a type, flags, and payload. These can be parsed
2983 from a string of the form:
2983 from a string of the form:
2984
2984
2985 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
2985 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
2986
2986
2987 ``request-id`` and ``stream-id`` are integers defining the request and
2987 ``request-id`` and ``stream-id`` are integers defining the request and
2988 stream identifiers.
2988 stream identifiers.
2989
2989
2990 ``type`` can be an integer value for the frame type or the string name
2990 ``type`` can be an integer value for the frame type or the string name
2991 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
2991 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
2992 ``command-name``.
2992 ``command-name``.
2993
2993
2994 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
2994 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
2995 components. Each component (and there can be just one) can be an integer
2995 components. Each component (and there can be just one) can be an integer
2996 or a flag name for stream flags or frame flags, respectively. Values are
2996 or a flag name for stream flags or frame flags, respectively. Values are
2997 resolved to integers and then bitwise OR'd together.
2997 resolved to integers and then bitwise OR'd together.
2998
2998
2999 ``payload`` represents the raw frame payload. If it begins with
2999 ``payload`` represents the raw frame payload. If it begins with
3000 ``cbor:``, the following string is evaluated as Python code and the
3000 ``cbor:``, the following string is evaluated as Python code and the
3001 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3001 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3002 as a Python byte string literal.
3002 as a Python byte string literal.
3003 """
3003 """
3004 opts = pycompat.byteskwargs(opts)
3004 opts = pycompat.byteskwargs(opts)
3005
3005
3006 if opts['localssh'] and not repo:
3006 if opts['localssh'] and not repo:
3007 raise error.Abort(_('--localssh requires a repository'))
3007 raise error.Abort(_('--localssh requires a repository'))
3008
3008
3009 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
3009 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
3010 raise error.Abort(_('invalid value for --peer'),
3010 raise error.Abort(_('invalid value for --peer'),
3011 hint=_('valid values are "raw", "http2", "ssh1", and "ssh2"'))
3011 hint=_('valid values are "raw", "http2", "ssh1", and "ssh2"'))
3012
3012
3013 if path and opts['localssh']:
3013 if path and opts['localssh']:
3014 raise error.Abort(_('cannot specify --localssh with an explicit '
3014 raise error.Abort(_('cannot specify --localssh with an explicit '
3015 'path'))
3015 'path'))
3016
3016
3017 if ui.interactive():
3017 if ui.interactive():
3018 ui.write(_('(waiting for commands on stdin)\n'))
3018 ui.write(_('(waiting for commands on stdin)\n'))
3019
3019
3020 blocks = list(_parsewirelangblocks(ui.fin))
3020 blocks = list(_parsewirelangblocks(ui.fin))
3021
3021
3022 proc = None
3022 proc = None
3023 stdin = None
3023 stdin = None
3024 stdout = None
3024 stdout = None
3025 stderr = None
3025 stderr = None
3026 opener = None
3026 opener = None
3027
3027
3028 if opts['localssh']:
3028 if opts['localssh']:
3029 # We start the SSH server in its own process so there is process
3029 # We start the SSH server in its own process so there is process
3030 # separation. This prevents a whole class of potential bugs around
3030 # separation. This prevents a whole class of potential bugs around
3031 # shared state from interfering with server operation.
3031 # shared state from interfering with server operation.
3032 args = procutil.hgcmd() + [
3032 args = procutil.hgcmd() + [
3033 '-R', repo.root,
3033 '-R', repo.root,
3034 'debugserve', '--sshstdio',
3034 'debugserve', '--sshstdio',
3035 ]
3035 ]
3036 proc = subprocess.Popen(args, stdin=subprocess.PIPE,
3036 proc = subprocess.Popen(args, stdin=subprocess.PIPE,
3037 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
3037 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
3038 bufsize=0)
3038 bufsize=0)
3039
3039
3040 stdin = proc.stdin
3040 stdin = proc.stdin
3041 stdout = proc.stdout
3041 stdout = proc.stdout
3042 stderr = proc.stderr
3042 stderr = proc.stderr
3043
3043
3044 # We turn the pipes into observers so we can log I/O.
3044 # We turn the pipes into observers so we can log I/O.
3045 if ui.verbose or opts['peer'] == 'raw':
3045 if ui.verbose or opts['peer'] == 'raw':
3046 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
3046 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
3047 logdata=True)
3047 logdata=True)
3048 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
3048 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
3049 logdata=True)
3049 logdata=True)
3050 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
3050 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
3051 logdata=True)
3051 logdata=True)
3052
3052
3053 # --localssh also implies the peer connection settings.
3053 # --localssh also implies the peer connection settings.
3054
3054
3055 url = 'ssh://localserver'
3055 url = 'ssh://localserver'
3056 autoreadstderr = not opts['noreadstderr']
3056 autoreadstderr = not opts['noreadstderr']
3057
3057
3058 if opts['peer'] == 'ssh1':
3058 if opts['peer'] == 'ssh1':
3059 ui.write(_('creating ssh peer for wire protocol version 1\n'))
3059 ui.write(_('creating ssh peer for wire protocol version 1\n'))
3060 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
3060 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
3061 None, autoreadstderr=autoreadstderr)
3061 None, autoreadstderr=autoreadstderr)
3062 elif opts['peer'] == 'ssh2':
3062 elif opts['peer'] == 'ssh2':
3063 ui.write(_('creating ssh peer for wire protocol version 2\n'))
3063 ui.write(_('creating ssh peer for wire protocol version 2\n'))
3064 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
3064 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
3065 None, autoreadstderr=autoreadstderr)
3065 None, autoreadstderr=autoreadstderr)
3066 elif opts['peer'] == 'raw':
3066 elif opts['peer'] == 'raw':
3067 ui.write(_('using raw connection to peer\n'))
3067 ui.write(_('using raw connection to peer\n'))
3068 peer = None
3068 peer = None
3069 else:
3069 else:
3070 ui.write(_('creating ssh peer from handshake results\n'))
3070 ui.write(_('creating ssh peer from handshake results\n'))
3071 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
3071 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
3072 autoreadstderr=autoreadstderr)
3072 autoreadstderr=autoreadstderr)
3073
3073
3074 elif path:
3074 elif path:
3075 # We bypass hg.peer() so we can proxy the sockets.
3075 # We bypass hg.peer() so we can proxy the sockets.
3076 # TODO consider not doing this because we skip
3076 # TODO consider not doing this because we skip
3077 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
3077 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
3078 u = util.url(path)
3078 u = util.url(path)
3079 if u.scheme != 'http':
3079 if u.scheme != 'http':
3080 raise error.Abort(_('only http:// paths are currently supported'))
3080 raise error.Abort(_('only http:// paths are currently supported'))
3081
3081
3082 url, authinfo = u.authinfo()
3082 url, authinfo = u.authinfo()
3083 openerargs = {
3083 openerargs = {
3084 r'useragent': b'Mercurial debugwireproto',
3084 r'useragent': b'Mercurial debugwireproto',
3085 }
3085 }
3086
3086
3087 # Turn pipes/sockets into observers so we can log I/O.
3087 # Turn pipes/sockets into observers so we can log I/O.
3088 if ui.verbose:
3088 if ui.verbose:
3089 openerargs.update({
3089 openerargs.update({
3090 r'loggingfh': ui,
3090 r'loggingfh': ui,
3091 r'loggingname': b's',
3091 r'loggingname': b's',
3092 r'loggingopts': {
3092 r'loggingopts': {
3093 r'logdata': True,
3093 r'logdata': True,
3094 r'logdataapis': False,
3094 r'logdataapis': False,
3095 },
3095 },
3096 })
3096 })
3097
3097
3098 if ui.debugflag:
3098 if ui.debugflag:
3099 openerargs[r'loggingopts'][r'logdataapis'] = True
3099 openerargs[r'loggingopts'][r'logdataapis'] = True
3100
3100
3101 # Don't send default headers when in raw mode. This allows us to
3101 # Don't send default headers when in raw mode. This allows us to
3102 # bypass most of the behavior of our URL handling code so we can
3102 # bypass most of the behavior of our URL handling code so we can
3103 # have near complete control over what's sent on the wire.
3103 # have near complete control over what's sent on the wire.
3104 if opts['peer'] == 'raw':
3104 if opts['peer'] == 'raw':
3105 openerargs[r'sendaccept'] = False
3105 openerargs[r'sendaccept'] = False
3106
3106
3107 opener = urlmod.opener(ui, authinfo, **openerargs)
3107 opener = urlmod.opener(ui, authinfo, **openerargs)
3108
3108
3109 if opts['peer'] == 'http2':
3109 if opts['peer'] == 'http2':
3110 ui.write(_('creating http peer for wire protocol version 2\n'))
3110 ui.write(_('creating http peer for wire protocol version 2\n'))
3111 # We go through makepeer() because we need an API descriptor for
3111 # We go through makepeer() because we need an API descriptor for
3112 # the peer instance to be useful.
3112 # the peer instance to be useful.
3113 with ui.configoverride({
3113 with ui.configoverride({
3114 ('experimental', 'httppeer.advertise-v2'): True}):
3114 ('experimental', 'httppeer.advertise-v2'): True}):
3115 if opts['nologhandshake']:
3115 if opts['nologhandshake']:
3116 ui.pushbuffer()
3116 ui.pushbuffer()
3117
3117
3118 peer = httppeer.makepeer(ui, path, opener=opener)
3118 peer = httppeer.makepeer(ui, path, opener=opener)
3119
3119
3120 if opts['nologhandshake']:
3120 if opts['nologhandshake']:
3121 ui.popbuffer()
3121 ui.popbuffer()
3122
3122
3123 if not isinstance(peer, httppeer.httpv2peer):
3123 if not isinstance(peer, httppeer.httpv2peer):
3124 raise error.Abort(_('could not instantiate HTTP peer for '
3124 raise error.Abort(_('could not instantiate HTTP peer for '
3125 'wire protocol version 2'),
3125 'wire protocol version 2'),
3126 hint=_('the server may not have the feature '
3126 hint=_('the server may not have the feature '
3127 'enabled or is not allowing this '
3127 'enabled or is not allowing this '
3128 'client version'))
3128 'client version'))
3129
3129
3130 elif opts['peer'] == 'raw':
3130 elif opts['peer'] == 'raw':
3131 ui.write(_('using raw connection to peer\n'))
3131 ui.write(_('using raw connection to peer\n'))
3132 peer = None
3132 peer = None
3133 elif opts['peer']:
3133 elif opts['peer']:
3134 raise error.Abort(_('--peer %s not supported with HTTP peers') %
3134 raise error.Abort(_('--peer %s not supported with HTTP peers') %
3135 opts['peer'])
3135 opts['peer'])
3136 else:
3136 else:
3137 peer = httppeer.makepeer(ui, path, opener=opener)
3137 peer = httppeer.makepeer(ui, path, opener=opener)
3138
3138
3139 # We /could/ populate stdin/stdout with sock.makefile()...
3139 # We /could/ populate stdin/stdout with sock.makefile()...
3140 else:
3140 else:
3141 raise error.Abort(_('unsupported connection configuration'))
3141 raise error.Abort(_('unsupported connection configuration'))
3142
3142
3143 batchedcommands = None
3143 batchedcommands = None
3144
3144
3145 # Now perform actions based on the parsed wire language instructions.
3145 # Now perform actions based on the parsed wire language instructions.
3146 for action, lines in blocks:
3146 for action, lines in blocks:
3147 if action in ('raw', 'raw+'):
3147 if action in ('raw', 'raw+'):
3148 if not stdin:
3148 if not stdin:
3149 raise error.Abort(_('cannot call raw/raw+ on this peer'))
3149 raise error.Abort(_('cannot call raw/raw+ on this peer'))
3150
3150
3151 # Concatenate the data together.
3151 # Concatenate the data together.
3152 data = ''.join(l.lstrip() for l in lines)
3152 data = ''.join(l.lstrip() for l in lines)
3153 data = stringutil.unescapestr(data)
3153 data = stringutil.unescapestr(data)
3154 stdin.write(data)
3154 stdin.write(data)
3155
3155
3156 if action == 'raw+':
3156 if action == 'raw+':
3157 stdin.flush()
3157 stdin.flush()
3158 elif action == 'flush':
3158 elif action == 'flush':
3159 if not stdin:
3159 if not stdin:
3160 raise error.Abort(_('cannot call flush on this peer'))
3160 raise error.Abort(_('cannot call flush on this peer'))
3161 stdin.flush()
3161 stdin.flush()
3162 elif action.startswith('command'):
3162 elif action.startswith('command'):
3163 if not peer:
3163 if not peer:
3164 raise error.Abort(_('cannot send commands unless peer instance '
3164 raise error.Abort(_('cannot send commands unless peer instance '
3165 'is available'))
3165 'is available'))
3166
3166
3167 command = action.split(' ', 1)[1]
3167 command = action.split(' ', 1)[1]
3168
3168
3169 args = {}
3169 args = {}
3170 for line in lines:
3170 for line in lines:
3171 # We need to allow empty values.
3171 # We need to allow empty values.
3172 fields = line.lstrip().split(' ', 1)
3172 fields = line.lstrip().split(' ', 1)
3173 if len(fields) == 1:
3173 if len(fields) == 1:
3174 key = fields[0]
3174 key = fields[0]
3175 value = ''
3175 value = ''
3176 else:
3176 else:
3177 key, value = fields
3177 key, value = fields
3178
3178
3179 if value.startswith('eval:'):
3179 if value.startswith('eval:'):
3180 value = stringutil.evalpythonliteral(value[5:])
3180 value = stringutil.evalpythonliteral(value[5:])
3181 else:
3181 else:
3182 value = stringutil.unescapestr(value)
3182 value = stringutil.unescapestr(value)
3183
3183
3184 args[key] = value
3184 args[key] = value
3185
3185
3186 if batchedcommands is not None:
3186 if batchedcommands is not None:
3187 batchedcommands.append((command, args))
3187 batchedcommands.append((command, args))
3188 continue
3188 continue
3189
3189
3190 ui.status(_('sending %s command\n') % command)
3190 ui.status(_('sending %s command\n') % command)
3191
3191
3192 if 'PUSHFILE' in args:
3192 if 'PUSHFILE' in args:
3193 with open(args['PUSHFILE'], r'rb') as fh:
3193 with open(args['PUSHFILE'], r'rb') as fh:
3194 del args['PUSHFILE']
3194 del args['PUSHFILE']
3195 res, output = peer._callpush(command, fh,
3195 res, output = peer._callpush(command, fh,
3196 **pycompat.strkwargs(args))
3196 **pycompat.strkwargs(args))
3197 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3197 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3198 ui.status(_('remote output: %s\n') %
3198 ui.status(_('remote output: %s\n') %
3199 stringutil.escapestr(output))
3199 stringutil.escapestr(output))
3200 else:
3200 else:
3201 with peer.commandexecutor() as e:
3201 with peer.commandexecutor() as e:
3202 res = e.callcommand(command, args).result()
3202 res = e.callcommand(command, args).result()
3203
3203
3204 if isinstance(res, wireprotov2peer.commandresponse):
3204 if isinstance(res, wireprotov2peer.commandresponse):
3205 val = list(res.cborobjects())
3205 val = list(res.cborobjects())
3206 ui.status(_('response: %s\n') %
3206 ui.status(_('response: %s\n') %
3207 stringutil.pprint(val, bprefix=True))
3207 stringutil.pprint(val, bprefix=True))
3208
3208
3209 else:
3209 else:
3210 ui.status(_('response: %s\n') %
3210 ui.status(_('response: %s\n') %
3211 stringutil.pprint(res, bprefix=True))
3211 stringutil.pprint(res, bprefix=True))
3212
3212
3213 elif action == 'batchbegin':
3213 elif action == 'batchbegin':
3214 if batchedcommands is not None:
3214 if batchedcommands is not None:
3215 raise error.Abort(_('nested batchbegin not allowed'))
3215 raise error.Abort(_('nested batchbegin not allowed'))
3216
3216
3217 batchedcommands = []
3217 batchedcommands = []
3218 elif action == 'batchsubmit':
3218 elif action == 'batchsubmit':
3219 # There is a batching API we could go through. But it would be
3219 # There is a batching API we could go through. But it would be
3220 # difficult to normalize requests into function calls. It is easier
3220 # difficult to normalize requests into function calls. It is easier
3221 # to bypass this layer and normalize to commands + args.
3221 # to bypass this layer and normalize to commands + args.
3222 ui.status(_('sending batch with %d sub-commands\n') %
3222 ui.status(_('sending batch with %d sub-commands\n') %
3223 len(batchedcommands))
3223 len(batchedcommands))
3224 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3224 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3225 ui.status(_('response #%d: %s\n') %
3225 ui.status(_('response #%d: %s\n') %
3226 (i, stringutil.escapestr(chunk)))
3226 (i, stringutil.escapestr(chunk)))
3227
3227
3228 batchedcommands = None
3228 batchedcommands = None
3229
3229
3230 elif action.startswith('httprequest '):
3230 elif action.startswith('httprequest '):
3231 if not opener:
3231 if not opener:
3232 raise error.Abort(_('cannot use httprequest without an HTTP '
3232 raise error.Abort(_('cannot use httprequest without an HTTP '
3233 'peer'))
3233 'peer'))
3234
3234
3235 request = action.split(' ', 2)
3235 request = action.split(' ', 2)
3236 if len(request) != 3:
3236 if len(request) != 3:
3237 raise error.Abort(_('invalid httprequest: expected format is '
3237 raise error.Abort(_('invalid httprequest: expected format is '
3238 '"httprequest <method> <path>'))
3238 '"httprequest <method> <path>'))
3239
3239
3240 method, httppath = request[1:]
3240 method, httppath = request[1:]
3241 headers = {}
3241 headers = {}
3242 body = None
3242 body = None
3243 frames = []
3243 frames = []
3244 for line in lines:
3244 for line in lines:
3245 line = line.lstrip()
3245 line = line.lstrip()
3246 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3246 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3247 if m:
3247 if m:
3248 headers[m.group(1)] = m.group(2)
3248 headers[m.group(1)] = m.group(2)
3249 continue
3249 continue
3250
3250
3251 if line.startswith(b'BODYFILE '):
3251 if line.startswith(b'BODYFILE '):
3252 with open(line.split(b' ', 1)[1], 'rb') as fh:
3252 with open(line.split(b' ', 1)[1], 'rb') as fh:
3253 body = fh.read()
3253 body = fh.read()
3254 elif line.startswith(b'frame '):
3254 elif line.startswith(b'frame '):
3255 frame = wireprotoframing.makeframefromhumanstring(
3255 frame = wireprotoframing.makeframefromhumanstring(
3256 line[len(b'frame '):])
3256 line[len(b'frame '):])
3257
3257
3258 frames.append(frame)
3258 frames.append(frame)
3259 else:
3259 else:
3260 raise error.Abort(_('unknown argument to httprequest: %s') %
3260 raise error.Abort(_('unknown argument to httprequest: %s') %
3261 line)
3261 line)
3262
3262
3263 url = path + httppath
3263 url = path + httppath
3264
3264
3265 if frames:
3265 if frames:
3266 body = b''.join(bytes(f) for f in frames)
3266 body = b''.join(bytes(f) for f in frames)
3267
3267
3268 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3268 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3269
3269
3270 # urllib.Request insists on using has_data() as a proxy for
3270 # urllib.Request insists on using has_data() as a proxy for
3271 # determining the request method. Override that to use our
3271 # determining the request method. Override that to use our
3272 # explicitly requested method.
3272 # explicitly requested method.
3273 req.get_method = lambda: pycompat.sysstr(method)
3273 req.get_method = lambda: pycompat.sysstr(method)
3274
3274
3275 try:
3275 try:
3276 res = opener.open(req)
3276 res = opener.open(req)
3277 body = res.read()
3277 body = res.read()
3278 except util.urlerr.urlerror as e:
3278 except util.urlerr.urlerror as e:
3279 # read() method must be called, but only exists in Python 2
3279 # read() method must be called, but only exists in Python 2
3280 getattr(e, 'read', lambda: None)()
3280 getattr(e, 'read', lambda: None)()
3281 continue
3281 continue
3282
3282
3283 if res.headers.get('Content-Type') == 'application/mercurial-cbor':
3283 if res.headers.get('Content-Type') == 'application/mercurial-cbor':
3284 ui.write(_('cbor> %s\n') %
3284 ui.write(_('cbor> %s\n') %
3285 stringutil.pprint(cbor.loads(body), bprefix=True))
3285 stringutil.pprint(cbor.loads(body), bprefix=True))
3286
3286
3287 elif action == 'close':
3287 elif action == 'close':
3288 peer.close()
3288 peer.close()
3289 elif action == 'readavailable':
3289 elif action == 'readavailable':
3290 if not stdout or not stderr:
3290 if not stdout or not stderr:
3291 raise error.Abort(_('readavailable not available on this peer'))
3291 raise error.Abort(_('readavailable not available on this peer'))
3292
3292
3293 stdin.close()
3293 stdin.close()
3294 stdout.read()
3294 stdout.read()
3295 stderr.read()
3295 stderr.read()
3296
3296
3297 elif action == 'readline':
3297 elif action == 'readline':
3298 if not stdout:
3298 if not stdout:
3299 raise error.Abort(_('readline not available on this peer'))
3299 raise error.Abort(_('readline not available on this peer'))
3300 stdout.readline()
3300 stdout.readline()
3301 elif action == 'ereadline':
3301 elif action == 'ereadline':
3302 if not stderr:
3302 if not stderr:
3303 raise error.Abort(_('ereadline not available on this peer'))
3303 raise error.Abort(_('ereadline not available on this peer'))
3304 stderr.readline()
3304 stderr.readline()
3305 elif action.startswith('read '):
3305 elif action.startswith('read '):
3306 count = int(action.split(' ', 1)[1])
3306 count = int(action.split(' ', 1)[1])
3307 if not stdout:
3307 if not stdout:
3308 raise error.Abort(_('read not available on this peer'))
3308 raise error.Abort(_('read not available on this peer'))
3309 stdout.read(count)
3309 stdout.read(count)
3310 elif action.startswith('eread '):
3310 elif action.startswith('eread '):
3311 count = int(action.split(' ', 1)[1])
3311 count = int(action.split(' ', 1)[1])
3312 if not stderr:
3312 if not stderr:
3313 raise error.Abort(_('eread not available on this peer'))
3313 raise error.Abort(_('eread not available on this peer'))
3314 stderr.read(count)
3314 stderr.read(count)
3315 else:
3315 else:
3316 raise error.Abort(_('unknown action: %s') % action)
3316 raise error.Abort(_('unknown action: %s') % action)
3317
3317
3318 if batchedcommands is not None:
3318 if batchedcommands is not None:
3319 raise error.Abort(_('unclosed "batchbegin" request'))
3319 raise error.Abort(_('unclosed "batchbegin" request'))
3320
3320
3321 if peer:
3321 if peer:
3322 peer.close()
3322 peer.close()
3323
3323
3324 if proc:
3324 if proc:
3325 proc.kill()
3325 proc.kill()