debugcommands: add support for extensions adding their own debug info...
Augie Fackler
r42875:0c0478b7 default draft
@@ -1,3484 +1,3489 @@ mercurial/debugcommands.py
# debugcommands.py - command processing for debug* commands
#
# Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import codecs
import collections
import difflib
import errno
import operator
import os
import random
import re
import socket
import ssl
import stat
import string
import subprocess
import sys
import time

from .i18n import _
from .node import (
    bin,
    hex,
    nullhex,
    nullid,
    nullrev,
    short,
)
from . import (
    bundle2,
    changegroup,
    cmdutil,
    color,
    context,
    copies,
    dagparser,
    encoding,
    error,
    exchange,
    extensions,
    filemerge,
    filesetlang,
    formatter,
    hg,
    httppeer,
    localrepo,
    lock as lockmod,
    logcmdutil,
    merge as mergemod,
    obsolete,
    obsutil,
    phases,
    policy,
    pvec,
    pycompat,
    registrar,
    repair,
    revlog,
    revset,
    revsetlang,
    scmutil,
    setdiscovery,
    simplemerge,
    sshpeer,
    sslutil,
    streamclone,
    templater,
    treediscovery,
    upgrade,
    url as urlmod,
    util,
    vfs as vfsmod,
    wireprotoframing,
    wireprotoserver,
    wireprotov2peer,
)
from .utils import (
    cborutil,
    compression,
    dateutil,
    procutil,
    stringutil,
)

from .revlogutils import (
    deltas as deltautil
)

release = lockmod.release

command = registrar.command()

@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    if len(args) == 3:
        index, rev1, rev2 = args
        r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = r.lookup
    elif len(args) == 2:
        if not repo:
            raise error.Abort(_('there is no Mercurial repository here '
                                '(.hg not found)'))
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_('either two or three arguments required'))
    a = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write('%d:%s\n' % (r.rev(a), hex(a)))

@command('debugapplystreamclonebundle', [], 'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    f = hg.openpath(ui, fname)
    gen = exchange.readbundle(ui, f, fname)
    gen.apply(repo)

@command('debugbuilddag',
    [('m', 'mergeable-file', None, _('add single file mergeable changes')),
    ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
    ('n', 'new-file', None, _('add new file at each rev'))],
    _('[OPTION]... [TEXT]'))
def debugbuilddag(ui, repo, text=None,
                  mergeable_file=False,
                  overwritten_file=False,
                  new_file=False):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_("reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_('repository is not empty'))

    # determine number of revs in DAG
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == 'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = ['%d' % i
                              for i in pycompat.xrange(0, total * linesperrev)]
        initialmergedlines.append("")

    tags = []
    progress = ui.makeprogress(_('building'), unit=_('revisions'),
                               total=total)
    with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
        at = -1
        atbranch = 'default'
        nodeids = []
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == 'n':
                ui.note(('node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = "mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [x[fn].data() for x in (pa, p1,
                                                                     p2)]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append("")
                    elif at > 0:
                        ml = p1[fn].data().split("\n")
                    else:
                        ml = initialmergedlines
                    ml[id * linesperrev] += " r%i" % id
                    mergedtext = "\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = "of"
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id

                if new_file:
                    fn = "nf%i" % id
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith("nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(repo, cx, path,
                                                  filecontent[path])
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
                                    date=(id, 0),
                                    user="debugbuilddag",
                                    extra={'branch': atbranch})
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == 'l':
                id, name = data
                ui.note(('tag %s\n' % name))
                tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == 'a':
                ui.note(('branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write("localtags", "".join(tags))

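# Illustrative sketch of the DAG language above (the tag name and node counts
# are arbitrary, not taken from this changeset): build a small history with a
# branch point and a merge back into it, starting from a fresh empty repo.
#
#   $ hg init dagdemo && cd dagdemo
#   $ hg debugbuilddag '+2:base *base +3 /base'
#
# "+2:base" creates two linear nodes and tags the second one "base", "*base"
# forks from that tag, "+3" extends the fork, and "/base" merges the last
# node back with "base".
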
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    indent_string = ' ' * indent
    if all:
        ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
                 % indent_string)

        def showchunks(named):
            ui.write("\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write("%s%s %s %s %s %s %d\n" %
                         (indent_string, hex(node), hex(p1), hex(p2),
                          hex(cs), hex(deltabase), len(delta)))

        chunkdata = gen.changelogheader()
        showchunks("changelog")
        chunkdata = gen.manifestheader()
        showchunks("manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata['filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_('use debugbundle2 for this file'))
        chunkdata = gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write("%s%s\n" % (indent_string, hex(node)))

def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = ' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        msg = "%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = "%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        fm = ui.formatter('debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()

def _debugphaseheads(ui, data, indent=0):
    """display version and markers contained in 'data'"""
    indent_string = ' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(indent_string)
            ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))

def _quasirepr(thing):
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        return '{%s}' % (
            b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
    return pycompat.bytestr(repr(thing))

def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_('not a bundle2 file'))
    ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
    parttypes = opts.get(r'part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = '%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        if part.type == 'changegroup':
            version = part.params.get('version', '01')
            cg = changegroup.getunbundler(version, part, 'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == 'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == 'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)

@command('debugbundle',
    [('a', 'all', None, _('show all details')),
    ('', 'part-type', [], _('show only the named part type')),
    ('', 'spec', None, _('print the bundlespec of the bundle'))],
    _('FILE'),
    norepo=True)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            spec = exchange.getbundlespec(ui, f)
            ui.write('%s\n' % spec)
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)

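# Illustrative invocations of the options declared above (the bundle file
# name is made up): "--spec" prints only the bundlespec, "--all" dumps every
# delta chunk, and "--part-type" filters bundle2 parts by name.
#
#   $ hg bundle -a ../example.hg
#   $ hg debugbundle --spec ../example.hg
#   $ hg debugbundle --all ../example.hg
#   $ hg debugbundle --part-type changegroup ../example.hg
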
@command('debugcapabilities',
    [], _('PATH'),
    norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    caps = peer.capabilities()
    ui.write(('Main capabilities:\n'))
    for c in sorted(caps):
        ui.write((' %s\n') % c)
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.write(('Bundle2 capabilities:\n'))
        for key, values in sorted(b2caps.iteritems()):
            ui.write((' %s\n') % key)
            for v in values:
                ui.write((' %s\n') % v)

@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        error = _(".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(error)

@command('debugcolor',
    [('', 'style', None, _('show all configured styles'))],
    'hg debugcolor')
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
    if opts.get(r'style'):
        return _debugdisplaystyle(ui)
    else:
        return _debugdisplaycolor(ui)

def _debugdisplaycolor(ui):
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for k, v in ui.configitems('color'):
            if k.startswith('color.'):
                ui._styles[k] = k[6:]
            elif k.startswith('terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_('available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(),
                   key=lambda i: ('_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(('%s\n') % colorname, label=label)

def _debugdisplaystyle(ui):
    ui.write(_('available style:\n'))
    if not ui._styles:
        return
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write('%s' % label, label=label)
        if effects:
            # 50
            ui.write(': ')
            ui.write(' ' * (max(0, width - len(label))))
            ui.write(', '.join(ui.label(e, e) for e in effects.split()))
        ui.write('\n')

@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(_('(warning: stream clone bundle will contain secret '
                  'revisions)\n'))

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))

@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
    ('b', 'branches', None, _('annotate with branch names')),
    ('', 'dots', None, _('use dots for runs')),
    ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get(r'spaces')
    dots = opts.get(r'dots')
    if file_:
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False),
                             file_)
        revs = set((int(r) for r in revs))
        def events():
            for r in rlog:
                yield 'n', (r, list(p for p in rlog.parentrevs(r)
                                    if p != -1))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        cl = repo.changelog
        tags = opts.get(r'tags')
        branches = opts.get(r'branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            b = "default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(p for p in cl.parentrevs(r)
                                    if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
        ui.write("\n")

@command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    r = cmdutil.openstorage(repo, 'debugdata', file_, opts)
    try:
        ui.write(r.revision(r.lookup(rev), raw=True))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)

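# Sketch of typical invocations (the revision number 0 is arbitrary): per the
# '-c|-m|FILE REV' usage above, "-c" dumps a changelog entry and "-m" a
# manifest entry; the FILE REV form takes a storage path instead.
#
#   $ hg debugdata -c 0
#   $ hg debugdata -m 0
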
@command('debugdate',
    [('e', 'extended', None, _('try extended date formats'))],
    _('[-e] DATE [RANGE]'),
    norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts[r"extended"]:
        d = dateutil.parsedate(date, util.extendeddateformats)
    else:
        d = dateutil.parsedate(date)
    ui.write(("internal: %d %d\n") % d)
    ui.write(("standard: %s\n") % dateutil.datestr(d))
    if range:
        m = dateutil.matchdate(range)
        ui.write(("match: %s\n") % m(d[0]))

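# Example invocations (the date strings are arbitrary): the "internal:" line
# is the parsed (unixtime, tzoffset) pair, "standard:" is the formatted date,
# and passing a RANGE argument adds a "match:" line.
#
#   $ hg debugdate '2006-06-01 13:00 +0200'
#   $ hg debugdate -e 2006
#   $ hg debugdate '2006-06-01' '>2005-01-01'
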
@command('debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
      (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
      of this revision
    :``extradist``: total size of revisions not part of this delta chain from
      base of delta chain to end of this revision; a measurement
      of how much extra data we need to read/seek across to read
      the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
      how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
      (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            if e[3] == e[5]:
                deltatype = 'p1'
            elif e[3] == e[6]:
                deltatype = 'p2'
            elif e[3] == rev - 1:
                deltatype = 'prev'
            elif e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            if e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    fm.plain(' rev chain# chainlen prev delta '
             'size rawsize chainsize ratio lindist extradist '
             'extraratio')
    if withsparseread:
        fm.plain(' readsize largestblk rddensity srchunks')
    fm.plain('\n')

    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write('readsize largestblock readdensity srchunks',
                     ' %10d %10d %9.5f %8d',
                     readsize, largestblock, readdensity, srchunks,
                     readsize=readsize, largestblock=largestblock,
                     readdensity=readdensity, srchunks=srchunks)

        fm.plain('\n')

    fm.end()

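# Illustrative usage (template text is arbitrary): the keywords documented in
# the docstring can be fed to the formatter via -T, for example to list chain
# identity, length and ratio for the manifest or changelog revlog.
#
#   $ hg debugdeltachain -m -T '{rev} {chainid} {chainlen} {chainratio}\n'
#   $ hg debugdeltachain -c -T json
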
737 @command('debugdirstate|debugstate',
737 @command('debugdirstate|debugstate',
738 [('', 'nodates', None, _('do not display the saved mtime (DEPRECATED)')),
738 [('', 'nodates', None, _('do not display the saved mtime (DEPRECATED)')),
739 ('', 'dates', True, _('display the saved mtime')),
739 ('', 'dates', True, _('display the saved mtime')),
740 ('', 'datesort', None, _('sort by saved mtime'))],
740 ('', 'datesort', None, _('sort by saved mtime'))],
741 _('[OPTION]...'))
741 _('[OPTION]...'))
742 def debugstate(ui, repo, **opts):
742 def debugstate(ui, repo, **opts):
743 """show the contents of the current dirstate"""
743 """show the contents of the current dirstate"""
744
744
745 nodates = not opts[r'dates']
745 nodates = not opts[r'dates']
746 if opts.get(r'nodates') is not None:
746 if opts.get(r'nodates') is not None:
747 nodates = True
747 nodates = True
748 datesort = opts.get(r'datesort')
748 datesort = opts.get(r'datesort')
749
749
750 if datesort:
750 if datesort:
751 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
751 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
752 else:
752 else:
753 keyfunc = None # sort by filename
753 keyfunc = None # sort by filename
754 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
754 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
755 if ent[3] == -1:
755 if ent[3] == -1:
756 timestr = 'unset '
756 timestr = 'unset '
757 elif nodates:
757 elif nodates:
758 timestr = 'set '
758 timestr = 'set '
759 else:
759 else:
760 timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
760 timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
761 time.localtime(ent[3]))
761 time.localtime(ent[3]))
762 timestr = encoding.strtolocal(timestr)
762 timestr = encoding.strtolocal(timestr)
763 if ent[1] & 0o20000:
763 if ent[1] & 0o20000:
764 mode = 'lnk'
764 mode = 'lnk'
765 else:
765 else:
766 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
766 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
767 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
767 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
768 for f in repo.dirstate.copies():
768 for f in repo.dirstate.copies():
769 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
769 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
770
770
771 @command('debugdiscovery',
771 @command('debugdiscovery',
772 [('', 'old', None, _('use old-style discovery')),
772 [('', 'old', None, _('use old-style discovery')),
773 ('', 'nonheads', None,
773 ('', 'nonheads', None,
774 _('use old-style discovery with non-heads included')),
774 _('use old-style discovery with non-heads included')),
775 ('', 'rev', [], 'restrict discovery to this set of revs'),
775 ('', 'rev', [], 'restrict discovery to this set of revs'),
776 ('', 'seed', '12323', 'specify the random seed use for discovery'),
776 ('', 'seed', '12323', 'specify the random seed use for discovery'),
777 ] + cmdutil.remoteopts,
777 ] + cmdutil.remoteopts,
778 _('[--rev REV] [OTHER]'))
778 _('[--rev REV] [OTHER]'))
779 def debugdiscovery(ui, repo, remoteurl="default", **opts):
779 def debugdiscovery(ui, repo, remoteurl="default", **opts):
780 """runs the changeset discovery protocol in isolation"""
780 """runs the changeset discovery protocol in isolation"""
781 opts = pycompat.byteskwargs(opts)
781 opts = pycompat.byteskwargs(opts)
782 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
782 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
783 remote = hg.peer(repo, opts, remoteurl)
783 remote = hg.peer(repo, opts, remoteurl)
784 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
784 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
785
785
786 # make sure tests are repeatable
786 # make sure tests are repeatable
787 random.seed(int(opts['seed']))
787 random.seed(int(opts['seed']))
788
788
789
789
790
790
791 if opts.get('old'):
791 if opts.get('old'):
792 def doit(pushedrevs, remoteheads, remote=remote):
792 def doit(pushedrevs, remoteheads, remote=remote):
793 if not util.safehasattr(remote, 'branches'):
793 if not util.safehasattr(remote, 'branches'):
794 # enable in-client legacy support
794 # enable in-client legacy support
795 remote = localrepo.locallegacypeer(remote.local())
795 remote = localrepo.locallegacypeer(remote.local())
796 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
796 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
797 force=True)
797 force=True)
798 common = set(common)
798 common = set(common)
799 if not opts.get('nonheads'):
799 if not opts.get('nonheads'):
800 ui.write(("unpruned common: %s\n") %
800 ui.write(("unpruned common: %s\n") %
801 " ".join(sorted(short(n) for n in common)))
801 " ".join(sorted(short(n) for n in common)))
802
802
803 clnode = repo.changelog.node
803 clnode = repo.changelog.node
804 common = repo.revs('heads(::%ln)', common)
804 common = repo.revs('heads(::%ln)', common)
805 common = {clnode(r) for r in common}
805 common = {clnode(r) for r in common}
806 return common, hds
806 return common, hds
807 else:
807 else:
808 def doit(pushedrevs, remoteheads, remote=remote):
808 def doit(pushedrevs, remoteheads, remote=remote):
809 nodes = None
809 nodes = None
810 if pushedrevs:
810 if pushedrevs:
811 revs = scmutil.revrange(repo, pushedrevs)
811 revs = scmutil.revrange(repo, pushedrevs)
812 nodes = [repo[r].node() for r in revs]
812 nodes = [repo[r].node() for r in revs]
813 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
813 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
814 ancestorsof=nodes)
814 ancestorsof=nodes)
815 return common, hds
815 return common, hds
816
816
817 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
817 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
818 localrevs = opts['rev']
818 localrevs = opts['rev']
819 with util.timedcm('debug-discovery') as t:
819 with util.timedcm('debug-discovery') as t:
820 common, hds = doit(localrevs, remoterevs)
820 common, hds = doit(localrevs, remoterevs)
821
821
822 # compute all statistics
822 # compute all statistics
823 common = set(common)
823 common = set(common)
824 rheads = set(hds)
824 rheads = set(hds)
825 lheads = set(repo.heads())
825 lheads = set(repo.heads())
826
826
827 data = {}
827 data = {}
828 data['elapsed'] = t.elapsed
828 data['elapsed'] = t.elapsed
829 data['nb-common'] = len(common)
829 data['nb-common'] = len(common)
830 data['nb-common-local'] = len(common & lheads)
830 data['nb-common-local'] = len(common & lheads)
831 data['nb-common-remote'] = len(common & rheads)
831 data['nb-common-remote'] = len(common & rheads)
832 data['nb-common-both'] = len(common & rheads & lheads)
832 data['nb-common-both'] = len(common & rheads & lheads)
833 data['nb-local'] = len(lheads)
833 data['nb-local'] = len(lheads)
834 data['nb-local-missing'] = data['nb-local'] - data['nb-common-local']
834 data['nb-local-missing'] = data['nb-local'] - data['nb-common-local']
835 data['nb-remote'] = len(rheads)
835 data['nb-remote'] = len(rheads)
836 data['nb-remote-unknown'] = data['nb-remote'] - data['nb-common-remote']
836 data['nb-remote-unknown'] = data['nb-remote'] - data['nb-common-remote']
837 data['nb-revs'] = len(repo.revs('all()'))
837 data['nb-revs'] = len(repo.revs('all()'))
838 data['nb-revs-common'] = len(repo.revs('::%ln', common))
838 data['nb-revs-common'] = len(repo.revs('::%ln', common))
839 data['nb-revs-missing'] = data['nb-revs'] - data['nb-revs-common']
839 data['nb-revs-missing'] = data['nb-revs'] - data['nb-revs-common']
840
840
841 # display discovery summary
841 # display discovery summary
842 ui.write(("elapsed time: %(elapsed)f seconds\n") % data)
842 ui.write(("elapsed time: %(elapsed)f seconds\n") % data)
843 ui.write(("heads summary:\n"))
843 ui.write(("heads summary:\n"))
844 ui.write((" total common heads: %(nb-common)9d\n") % data)
844 ui.write((" total common heads: %(nb-common)9d\n") % data)
845 ui.write((" also local heads: %(nb-common-local)9d\n") % data)
845 ui.write((" also local heads: %(nb-common-local)9d\n") % data)
846 ui.write((" also remote heads: %(nb-common-remote)9d\n") % data)
846 ui.write((" also remote heads: %(nb-common-remote)9d\n") % data)
847 ui.write((" both: %(nb-common-both)9d\n") % data)
847 ui.write((" both: %(nb-common-both)9d\n") % data)
848 ui.write((" local heads: %(nb-local)9d\n") % data)
848 ui.write((" local heads: %(nb-local)9d\n") % data)
849 ui.write((" common: %(nb-common-local)9d\n") % data)
849 ui.write((" common: %(nb-common-local)9d\n") % data)
850 ui.write((" missing: %(nb-local-missing)9d\n") % data)
850 ui.write((" missing: %(nb-local-missing)9d\n") % data)
851 ui.write((" remote heads: %(nb-remote)9d\n") % data)
851 ui.write((" remote heads: %(nb-remote)9d\n") % data)
852 ui.write((" common: %(nb-common-remote)9d\n") % data)
852 ui.write((" common: %(nb-common-remote)9d\n") % data)
853 ui.write((" unknown: %(nb-remote-unknown)9d\n") % data)
853 ui.write((" unknown: %(nb-remote-unknown)9d\n") % data)
854 ui.write(("local changesets: %(nb-revs)9d\n") % data)
854 ui.write(("local changesets: %(nb-revs)9d\n") % data)
855 ui.write((" common: %(nb-revs-common)9d\n") % data)
855 ui.write((" common: %(nb-revs-common)9d\n") % data)
856 ui.write((" missing: %(nb-revs-missing)9d\n") % data)
856 ui.write((" missing: %(nb-revs-missing)9d\n") % data)
857
857
858 if ui.verbose:
858 if ui.verbose:
859 ui.write(("common heads: %s\n") %
859 ui.write(("common heads: %s\n") %
860 " ".join(sorted(short(n) for n in common)))
860 " ".join(sorted(short(n) for n in common)))
861
861
862 _chunksize = 4 << 10
862 _chunksize = 4 << 10
863
863
864 @command('debugdownload',
864 @command('debugdownload',
865 [
865 [
866 ('o', 'output', '', _('path')),
866 ('o', 'output', '', _('path')),
867 ],
867 ],
868 optionalrepo=True)
868 optionalrepo=True)
869 def debugdownload(ui, repo, url, output=None, **opts):
869 def debugdownload(ui, repo, url, output=None, **opts):
870 """download a resource using Mercurial logic and config
870 """download a resource using Mercurial logic and config
871 """
871 """
872 fh = urlmod.open(ui, url, output)
872 fh = urlmod.open(ui, url, output)
873
873
874 dest = ui
874 dest = ui
875 if output:
875 if output:
876 dest = open(output, "wb", _chunksize)
876 dest = open(output, "wb", _chunksize)
877 try:
877 try:
878 data = fh.read(_chunksize)
878 data = fh.read(_chunksize)
879 while data:
879 while data:
880 dest.write(data)
880 dest.write(data)
881 data = fh.read(_chunksize)
881 data = fh.read(_chunksize)
882 finally:
882 finally:
883 if output:
883 if output:
884 dest.close()
884 dest.close()
885
885
886 @command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
886 @command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
887 def debugextensions(ui, repo, **opts):
887 def debugextensions(ui, repo, **opts):
888 '''show information about active extensions'''
888 '''show information about active extensions'''
889 opts = pycompat.byteskwargs(opts)
889 opts = pycompat.byteskwargs(opts)
890 exts = extensions.extensions(ui)
890 exts = extensions.extensions(ui)
891 hgver = util.version()
891 hgver = util.version()
892 fm = ui.formatter('debugextensions', opts)
892 fm = ui.formatter('debugextensions', opts)
893 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
893 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
894 isinternal = extensions.ismoduleinternal(extmod)
894 isinternal = extensions.ismoduleinternal(extmod)
895 extsource = pycompat.fsencode(extmod.__file__)
895 extsource = pycompat.fsencode(extmod.__file__)
896 if isinternal:
896 if isinternal:
897 exttestedwith = [] # never expose magic string to users
897 exttestedwith = [] # never expose magic string to users
898 else:
898 else:
899 exttestedwith = getattr(extmod, 'testedwith', '').split()
899 exttestedwith = getattr(extmod, 'testedwith', '').split()
900 extbuglink = getattr(extmod, 'buglink', None)
900 extbuglink = getattr(extmod, 'buglink', None)
901
901
902 fm.startitem()
902 fm.startitem()
903
903
904 if ui.quiet or ui.verbose:
904 if ui.quiet or ui.verbose:
905 fm.write('name', '%s\n', extname)
905 fm.write('name', '%s\n', extname)
906 else:
906 else:
907 fm.write('name', '%s', extname)
907 fm.write('name', '%s', extname)
908 if isinternal or hgver in exttestedwith:
908 if isinternal or hgver in exttestedwith:
909 fm.plain('\n')
909 fm.plain('\n')
910 elif not exttestedwith:
910 elif not exttestedwith:
911 fm.plain(_(' (untested!)\n'))
911 fm.plain(_(' (untested!)\n'))
912 else:
912 else:
913 lasttestedversion = exttestedwith[-1]
913 lasttestedversion = exttestedwith[-1]
914 fm.plain(' (%s!)\n' % lasttestedversion)
914 fm.plain(' (%s!)\n' % lasttestedversion)
915
915
916 fm.condwrite(ui.verbose and extsource, 'source',
916 fm.condwrite(ui.verbose and extsource, 'source',
917 _(' location: %s\n'), extsource or "")
917 _(' location: %s\n'), extsource or "")
918
918
919 if ui.verbose:
919 if ui.verbose:
920 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
920 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
921 fm.data(bundled=isinternal)
921 fm.data(bundled=isinternal)
922
922
923 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
923 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
924 _(' tested with: %s\n'),
924 _(' tested with: %s\n'),
925 fm.formatlist(exttestedwith, name='ver'))
925 fm.formatlist(exttestedwith, name='ver'))
926
926
927 fm.condwrite(ui.verbose and extbuglink, 'buglink',
927 fm.condwrite(ui.verbose and extbuglink, 'buglink',
928 _(' bug reporting: %s\n'), extbuglink or "")
928 _(' bug reporting: %s\n'), extbuglink or "")
929
929
930 fm.end()
930 fm.end()
931
931
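debugextensions only has testedwith and buglink values to report when the extension module defines them. A hedged sketch of the module-level attributes a third-party extension would declare (the module name, versions and URL are illustrative only):

# myext.py -- illustrative third-party extension skeleton
testedwith = b'5.0 5.1'                      # versions reported by 'hg debugextensions -v'
buglink = b'https://example.org/myext/bugs'  # shown on the 'bug reporting:' line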
932 @command('debugfileset',
932 @command('debugfileset',
933 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
933 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
934 ('', 'all-files', False,
934 ('', 'all-files', False,
935 _('test files from all revisions and working directory')),
935 _('test files from all revisions and working directory')),
936 ('s', 'show-matcher', None,
936 ('s', 'show-matcher', None,
937 _('print internal representation of matcher')),
937 _('print internal representation of matcher')),
938 ('p', 'show-stage', [],
938 ('p', 'show-stage', [],
939 _('print parsed tree at the given stage'), _('NAME'))],
939 _('print parsed tree at the given stage'), _('NAME'))],
940 _('[-r REV] [--all-files] [OPTION]... FILESPEC'))
940 _('[-r REV] [--all-files] [OPTION]... FILESPEC'))
941 def debugfileset(ui, repo, expr, **opts):
941 def debugfileset(ui, repo, expr, **opts):
942 '''parse and apply a fileset specification'''
942 '''parse and apply a fileset specification'''
943 from . import fileset
943 from . import fileset
944 fileset.symbols # force import of fileset so we have predicates to optimize
944 fileset.symbols # force import of fileset so we have predicates to optimize
945 opts = pycompat.byteskwargs(opts)
945 opts = pycompat.byteskwargs(opts)
946 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
946 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
947
947
948 stages = [
948 stages = [
949 ('parsed', pycompat.identity),
949 ('parsed', pycompat.identity),
950 ('analyzed', filesetlang.analyze),
950 ('analyzed', filesetlang.analyze),
951 ('optimized', filesetlang.optimize),
951 ('optimized', filesetlang.optimize),
952 ]
952 ]
953 stagenames = set(n for n, f in stages)
953 stagenames = set(n for n, f in stages)
954
954
955 showalways = set()
955 showalways = set()
956 if ui.verbose and not opts['show_stage']:
956 if ui.verbose and not opts['show_stage']:
957 # show parsed tree by --verbose (deprecated)
957 # show parsed tree by --verbose (deprecated)
958 showalways.add('parsed')
958 showalways.add('parsed')
959 if opts['show_stage'] == ['all']:
959 if opts['show_stage'] == ['all']:
960 showalways.update(stagenames)
960 showalways.update(stagenames)
961 else:
961 else:
962 for n in opts['show_stage']:
962 for n in opts['show_stage']:
963 if n not in stagenames:
963 if n not in stagenames:
964 raise error.Abort(_('invalid stage name: %s') % n)
964 raise error.Abort(_('invalid stage name: %s') % n)
965 showalways.update(opts['show_stage'])
965 showalways.update(opts['show_stage'])
966
966
967 tree = filesetlang.parse(expr)
967 tree = filesetlang.parse(expr)
968 for n, f in stages:
968 for n, f in stages:
969 tree = f(tree)
969 tree = f(tree)
970 if n in showalways:
970 if n in showalways:
971 if opts['show_stage'] or n != 'parsed':
971 if opts['show_stage'] or n != 'parsed':
972 ui.write(("* %s:\n") % n)
972 ui.write(("* %s:\n") % n)
973 ui.write(filesetlang.prettyformat(tree), "\n")
973 ui.write(filesetlang.prettyformat(tree), "\n")
974
974
975 files = set()
975 files = set()
976 if opts['all_files']:
976 if opts['all_files']:
977 for r in repo:
977 for r in repo:
978 c = repo[r]
978 c = repo[r]
979 files.update(c.files())
979 files.update(c.files())
980 files.update(c.substate)
980 files.update(c.substate)
981 if opts['all_files'] or ctx.rev() is None:
981 if opts['all_files'] or ctx.rev() is None:
982 wctx = repo[None]
982 wctx = repo[None]
983 files.update(repo.dirstate.walk(scmutil.matchall(repo),
983 files.update(repo.dirstate.walk(scmutil.matchall(repo),
984 subrepos=list(wctx.substate),
984 subrepos=list(wctx.substate),
985 unknown=True, ignored=True))
985 unknown=True, ignored=True))
986 files.update(wctx.substate)
986 files.update(wctx.substate)
987 else:
987 else:
988 files.update(ctx.files())
988 files.update(ctx.files())
989 files.update(ctx.substate)
989 files.update(ctx.substate)
990
990
991 m = ctx.matchfileset(expr)
991 m = ctx.matchfileset(expr)
992 if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
992 if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
993 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
993 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
994 for f in sorted(files):
994 for f in sorted(files):
995 if not m(f):
995 if not m(f):
996 continue
996 continue
997 ui.write("%s\n" % f)
997 ui.write("%s\n" % f)
998
998
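For a single expression the stage loop above amounts to running the three filesetlang passes in order. A rough equivalent for one sample spec (the expression itself is only an example):

from mercurial import filesetlang

tree = filesetlang.parse(b'added() or modified()')   # 'parsed' stage
tree = filesetlang.analyze(tree)                     # 'analyzed' stage
tree = filesetlang.optimize(tree)                    # 'optimized' stage
text = filesetlang.prettyformat(tree)                # bytes, as printed with --show-stage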
999 @command('debugformat',
999 @command('debugformat',
1000 [] + cmdutil.formatteropts)
1000 [] + cmdutil.formatteropts)
1001 def debugformat(ui, repo, **opts):
1001 def debugformat(ui, repo, **opts):
1002 """display format information about the current repository
1002 """display format information about the current repository
1003
1003
1004 Use --verbose to get extra information about the current config value and
1004 Use --verbose to get extra information about the current config value and
1005 the Mercurial default."""
1005 the Mercurial default."""
1006 opts = pycompat.byteskwargs(opts)
1006 opts = pycompat.byteskwargs(opts)
1007 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1007 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1008 maxvariantlength = max(len('format-variant'), maxvariantlength)
1008 maxvariantlength = max(len('format-variant'), maxvariantlength)
1009
1009
1010 def makeformatname(name):
1010 def makeformatname(name):
1011 return '%s:' + (' ' * (maxvariantlength - len(name)))
1011 return '%s:' + (' ' * (maxvariantlength - len(name)))
1012
1012
1013 fm = ui.formatter('debugformat', opts)
1013 fm = ui.formatter('debugformat', opts)
1014 if fm.isplain():
1014 if fm.isplain():
1015 def formatvalue(value):
1015 def formatvalue(value):
1016 if util.safehasattr(value, 'startswith'):
1016 if util.safehasattr(value, 'startswith'):
1017 return value
1017 return value
1018 if value:
1018 if value:
1019 return 'yes'
1019 return 'yes'
1020 else:
1020 else:
1021 return 'no'
1021 return 'no'
1022 else:
1022 else:
1023 formatvalue = pycompat.identity
1023 formatvalue = pycompat.identity
1024
1024
1025 fm.plain('format-variant')
1025 fm.plain('format-variant')
1026 fm.plain(' ' * (maxvariantlength - len('format-variant')))
1026 fm.plain(' ' * (maxvariantlength - len('format-variant')))
1027 fm.plain(' repo')
1027 fm.plain(' repo')
1028 if ui.verbose:
1028 if ui.verbose:
1029 fm.plain(' config default')
1029 fm.plain(' config default')
1030 fm.plain('\n')
1030 fm.plain('\n')
1031 for fv in upgrade.allformatvariant:
1031 for fv in upgrade.allformatvariant:
1032 fm.startitem()
1032 fm.startitem()
1033 repovalue = fv.fromrepo(repo)
1033 repovalue = fv.fromrepo(repo)
1034 configvalue = fv.fromconfig(repo)
1034 configvalue = fv.fromconfig(repo)
1035
1035
1036 if repovalue != configvalue:
1036 if repovalue != configvalue:
1037 namelabel = 'formatvariant.name.mismatchconfig'
1037 namelabel = 'formatvariant.name.mismatchconfig'
1038 repolabel = 'formatvariant.repo.mismatchconfig'
1038 repolabel = 'formatvariant.repo.mismatchconfig'
1039 elif repovalue != fv.default:
1039 elif repovalue != fv.default:
1040 namelabel = 'formatvariant.name.mismatchdefault'
1040 namelabel = 'formatvariant.name.mismatchdefault'
1041 repolabel = 'formatvariant.repo.mismatchdefault'
1041 repolabel = 'formatvariant.repo.mismatchdefault'
1042 else:
1042 else:
1043 namelabel = 'formatvariant.name.uptodate'
1043 namelabel = 'formatvariant.name.uptodate'
1044 repolabel = 'formatvariant.repo.uptodate'
1044 repolabel = 'formatvariant.repo.uptodate'
1045
1045
1046 fm.write('name', makeformatname(fv.name), fv.name,
1046 fm.write('name', makeformatname(fv.name), fv.name,
1047 label=namelabel)
1047 label=namelabel)
1048 fm.write('repo', ' %3s', formatvalue(repovalue),
1048 fm.write('repo', ' %3s', formatvalue(repovalue),
1049 label=repolabel)
1049 label=repolabel)
1050 if fv.default != configvalue:
1050 if fv.default != configvalue:
1051 configlabel = 'formatvariant.config.special'
1051 configlabel = 'formatvariant.config.special'
1052 else:
1052 else:
1053 configlabel = 'formatvariant.config.default'
1053 configlabel = 'formatvariant.config.default'
1054 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
1054 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
1055 label=configlabel)
1055 label=configlabel)
1056 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
1056 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
1057 label='formatvariant.default')
1057 label='formatvariant.default')
1058 fm.plain('\n')
1058 fm.plain('\n')
1059 fm.end()
1059 fm.end()
1060
1060
1061 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
1061 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
1062 def debugfsinfo(ui, path="."):
1062 def debugfsinfo(ui, path="."):
1063 """show information detected about current filesystem"""
1063 """show information detected about current filesystem"""
1064 ui.write(('path: %s\n') % path)
1064 ui.write(('path: %s\n') % path)
1065 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
1065 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
1066 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
1066 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
1067 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
1067 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
1068 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
1068 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
1069 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
1069 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
1070 casesensitive = '(unknown)'
1070 casesensitive = '(unknown)'
1071 try:
1071 try:
1072 with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
1072 with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
1073 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
1073 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
1074 except OSError:
1074 except OSError:
1075 pass
1075 pass
1076 ui.write(('case-sensitive: %s\n') % casesensitive)
1076 ui.write(('case-sensitive: %s\n') % casesensitive)
1077
1077
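The repeated "cond and 'yes' or 'no'" pattern above is the old-style conditional expression; because 'yes' is always truthy it behaves the same as the modern spelling:

# equivalent form of the and/or idiom used in the writes above
'yes' if util.checkexec(path) else 'no'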
1078 @command('debuggetbundle',
1078 @command('debuggetbundle',
1079 [('H', 'head', [], _('id of head node'), _('ID')),
1079 [('H', 'head', [], _('id of head node'), _('ID')),
1080 ('C', 'common', [], _('id of common node'), _('ID')),
1080 ('C', 'common', [], _('id of common node'), _('ID')),
1081 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
1081 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
1082 _('REPO FILE [-H|-C ID]...'),
1082 _('REPO FILE [-H|-C ID]...'),
1083 norepo=True)
1083 norepo=True)
1084 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1084 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1085 """retrieves a bundle from a repo
1085 """retrieves a bundle from a repo
1086
1086
1087 Every ID must be a full-length hex node id string. Saves the bundle to the
1087 Every ID must be a full-length hex node id string. Saves the bundle to the
1088 given file.
1088 given file.
1089 """
1089 """
1090 opts = pycompat.byteskwargs(opts)
1090 opts = pycompat.byteskwargs(opts)
1091 repo = hg.peer(ui, opts, repopath)
1091 repo = hg.peer(ui, opts, repopath)
1092 if not repo.capable('getbundle'):
1092 if not repo.capable('getbundle'):
1093 raise error.Abort("getbundle() not supported by target repository")
1093 raise error.Abort("getbundle() not supported by target repository")
1094 args = {}
1094 args = {}
1095 if common:
1095 if common:
1096 args[r'common'] = [bin(s) for s in common]
1096 args[r'common'] = [bin(s) for s in common]
1097 if head:
1097 if head:
1098 args[r'heads'] = [bin(s) for s in head]
1098 args[r'heads'] = [bin(s) for s in head]
1099 # TODO: get desired bundlecaps from command line.
1099 # TODO: get desired bundlecaps from command line.
1100 args[r'bundlecaps'] = None
1100 args[r'bundlecaps'] = None
1101 bundle = repo.getbundle('debug', **args)
1101 bundle = repo.getbundle('debug', **args)
1102
1102
1103 bundletype = opts.get('type', 'bzip2').lower()
1103 bundletype = opts.get('type', 'bzip2').lower()
1104 btypes = {'none': 'HG10UN',
1104 btypes = {'none': 'HG10UN',
1105 'bzip2': 'HG10BZ',
1105 'bzip2': 'HG10BZ',
1106 'gzip': 'HG10GZ',
1106 'gzip': 'HG10GZ',
1107 'bundle2': 'HG20'}
1107 'bundle2': 'HG20'}
1108 bundletype = btypes.get(bundletype)
1108 bundletype = btypes.get(bundletype)
1109 if bundletype not in bundle2.bundletypes:
1109 if bundletype not in bundle2.bundletypes:
1110 raise error.Abort(_('unknown bundle type specified with --type'))
1110 raise error.Abort(_('unknown bundle type specified with --type'))
1111 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1111 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1112
1112
1113 @command('debugignore', [], '[FILE]')
1113 @command('debugignore', [], '[FILE]')
1114 def debugignore(ui, repo, *files, **opts):
1114 def debugignore(ui, repo, *files, **opts):
1115 """display the combined ignore pattern and information about ignored files
1115 """display the combined ignore pattern and information about ignored files
1116
1116
1117 With no argument display the combined ignore pattern.
1117 With no argument display the combined ignore pattern.
1118
1118
1119 Given space separated file names, show whether each file is ignored and,
1119 Given space separated file names, show whether each file is ignored and,
1120 if so, the ignore rule (file and line number) that matched it.
1120 if so, the ignore rule (file and line number) that matched it.
1121 """
1121 """
1122 ignore = repo.dirstate._ignore
1122 ignore = repo.dirstate._ignore
1123 if not files:
1123 if not files:
1124 # Show all the patterns
1124 # Show all the patterns
1125 ui.write("%s\n" % pycompat.byterepr(ignore))
1125 ui.write("%s\n" % pycompat.byterepr(ignore))
1126 else:
1126 else:
1127 m = scmutil.match(repo[None], pats=files)
1127 m = scmutil.match(repo[None], pats=files)
1128 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1128 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1129 for f in m.files():
1129 for f in m.files():
1130 nf = util.normpath(f)
1130 nf = util.normpath(f)
1131 ignored = None
1131 ignored = None
1132 ignoredata = None
1132 ignoredata = None
1133 if nf != '.':
1133 if nf != '.':
1134 if ignore(nf):
1134 if ignore(nf):
1135 ignored = nf
1135 ignored = nf
1136 ignoredata = repo.dirstate._ignorefileandline(nf)
1136 ignoredata = repo.dirstate._ignorefileandline(nf)
1137 else:
1137 else:
1138 for p in util.finddirs(nf):
1138 for p in util.finddirs(nf):
1139 if ignore(p):
1139 if ignore(p):
1140 ignored = p
1140 ignored = p
1141 ignoredata = repo.dirstate._ignorefileandline(p)
1141 ignoredata = repo.dirstate._ignorefileandline(p)
1142 break
1142 break
1143 if ignored:
1143 if ignored:
1144 if ignored == nf:
1144 if ignored == nf:
1145 ui.write(_("%s is ignored\n") % uipathfn(f))
1145 ui.write(_("%s is ignored\n") % uipathfn(f))
1146 else:
1146 else:
1147 ui.write(_("%s is ignored because of "
1147 ui.write(_("%s is ignored because of "
1148 "containing directory %s\n")
1148 "containing directory %s\n")
1149 % (uipathfn(f), ignored))
1149 % (uipathfn(f), ignored))
1150 ignorefile, lineno, line = ignoredata
1150 ignorefile, lineno, line = ignoredata
1151 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1151 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1152 % (ignorefile, lineno, line))
1152 % (ignorefile, lineno, line))
1153 else:
1153 else:
1154 ui.write(_("%s is not ignored\n") % uipathfn(f))
1154 ui.write(_("%s is not ignored\n") % uipathfn(f))
1155
1155
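Putting the three write branches above together, the per-file output looks roughly like the following (paths, rule text and line numbers are made up for illustration):

build/out.o is ignored
(ignore rule in .hgignore, line 3: 'build/')
vendor/lib.py is ignored because of containing directory vendor
(ignore rule in .hgignore, line 5: 'vendor/')
src/main.py is not ignored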
1156 @command('debugindex', cmdutil.debugrevlogopts + cmdutil.formatteropts,
1156 @command('debugindex', cmdutil.debugrevlogopts + cmdutil.formatteropts,
1157 _('-c|-m|FILE'))
1157 _('-c|-m|FILE'))
1158 def debugindex(ui, repo, file_=None, **opts):
1158 def debugindex(ui, repo, file_=None, **opts):
1159 """dump index data for a storage primitive"""
1159 """dump index data for a storage primitive"""
1160 opts = pycompat.byteskwargs(opts)
1160 opts = pycompat.byteskwargs(opts)
1161 store = cmdutil.openstorage(repo, 'debugindex', file_, opts)
1161 store = cmdutil.openstorage(repo, 'debugindex', file_, opts)
1162
1162
1163 if ui.debugflag:
1163 if ui.debugflag:
1164 shortfn = hex
1164 shortfn = hex
1165 else:
1165 else:
1166 shortfn = short
1166 shortfn = short
1167
1167
1168 idlen = 12
1168 idlen = 12
1169 for i in store:
1169 for i in store:
1170 idlen = len(shortfn(store.node(i)))
1170 idlen = len(shortfn(store.node(i)))
1171 break
1171 break
1172
1172
1173 fm = ui.formatter('debugindex', opts)
1173 fm = ui.formatter('debugindex', opts)
1174 fm.plain(b' rev linkrev %s %s p2\n' % (
1174 fm.plain(b' rev linkrev %s %s p2\n' % (
1175 b'nodeid'.ljust(idlen),
1175 b'nodeid'.ljust(idlen),
1176 b'p1'.ljust(idlen)))
1176 b'p1'.ljust(idlen)))
1177
1177
1178 for rev in store:
1178 for rev in store:
1179 node = store.node(rev)
1179 node = store.node(rev)
1180 parents = store.parents(node)
1180 parents = store.parents(node)
1181
1181
1182 fm.startitem()
1182 fm.startitem()
1183 fm.write(b'rev', b'%6d ', rev)
1183 fm.write(b'rev', b'%6d ', rev)
1184 fm.write(b'linkrev', '%7d ', store.linkrev(rev))
1184 fm.write(b'linkrev', '%7d ', store.linkrev(rev))
1185 fm.write(b'node', '%s ', shortfn(node))
1185 fm.write(b'node', '%s ', shortfn(node))
1186 fm.write(b'p1', '%s ', shortfn(parents[0]))
1186 fm.write(b'p1', '%s ', shortfn(parents[0]))
1187 fm.write(b'p2', '%s', shortfn(parents[1]))
1187 fm.write(b'p2', '%s', shortfn(parents[1]))
1188 fm.plain(b'\n')
1188 fm.plain(b'\n')
1189
1189
1190 fm.end()
1190 fm.end()
1191
1191
1192 @command('debugindexdot', cmdutil.debugrevlogopts,
1192 @command('debugindexdot', cmdutil.debugrevlogopts,
1193 _('-c|-m|FILE'), optionalrepo=True)
1193 _('-c|-m|FILE'), optionalrepo=True)
1194 def debugindexdot(ui, repo, file_=None, **opts):
1194 def debugindexdot(ui, repo, file_=None, **opts):
1195 """dump an index DAG as a graphviz dot file"""
1195 """dump an index DAG as a graphviz dot file"""
1196 opts = pycompat.byteskwargs(opts)
1196 opts = pycompat.byteskwargs(opts)
1197 r = cmdutil.openstorage(repo, 'debugindexdot', file_, opts)
1197 r = cmdutil.openstorage(repo, 'debugindexdot', file_, opts)
1198 ui.write(("digraph G {\n"))
1198 ui.write(("digraph G {\n"))
1199 for i in r:
1199 for i in r:
1200 node = r.node(i)
1200 node = r.node(i)
1201 pp = r.parents(node)
1201 pp = r.parents(node)
1202 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1202 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1203 if pp[1] != nullid:
1203 if pp[1] != nullid:
1204 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1204 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1205 ui.write("}\n")
1205 ui.write("}\n")
1206
1206
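Each revision contributes an edge from its first parent's rev number (the root's parent rev is -1), and a second edge is emitted only for merges. For a simple linear three-revision history the generated dot file would look roughly like:

digraph G {
	-1 -> 0
	0 -> 1
	1 -> 2
}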
1207 @command('debugindexstats', [])
1207 @command('debugindexstats', [])
1208 def debugindexstats(ui, repo):
1208 def debugindexstats(ui, repo):
1209 """show stats related to the changelog index"""
1209 """show stats related to the changelog index"""
1210 repo.changelog.shortest(nullid, 1)
1210 repo.changelog.shortest(nullid, 1)
1211 index = repo.changelog.index
1211 index = repo.changelog.index
1212 if not util.safehasattr(index, 'stats'):
1212 if not util.safehasattr(index, 'stats'):
1213 raise error.Abort(_('debugindexstats only works with native code'))
1213 raise error.Abort(_('debugindexstats only works with native code'))
1214 for k, v in sorted(index.stats().items()):
1214 for k, v in sorted(index.stats().items()):
1215 ui.write('%s: %d\n' % (k, v))
1215 ui.write('%s: %d\n' % (k, v))
1216
1216
1217 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1217 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1218 def debuginstall(ui, **opts):
1218 def debuginstall(ui, **opts):
1219 '''test Mercurial installation
1219 '''test Mercurial installation
1220
1220
1221 Returns 0 on success.
1221 Returns 0 on success.
1222 '''
1222 '''
1223 opts = pycompat.byteskwargs(opts)
1223 opts = pycompat.byteskwargs(opts)
1224
1224
1225 problems = 0
1225 problems = 0
1226
1226
1227 fm = ui.formatter('debuginstall', opts)
1227 fm = ui.formatter('debuginstall', opts)
1228 fm.startitem()
1228 fm.startitem()
1229
1229
1230 # encoding
1230 # encoding
1231 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1231 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1232 err = None
1232 err = None
1233 try:
1233 try:
1234 codecs.lookup(pycompat.sysstr(encoding.encoding))
1234 codecs.lookup(pycompat.sysstr(encoding.encoding))
1235 except LookupError as inst:
1235 except LookupError as inst:
1236 err = stringutil.forcebytestr(inst)
1236 err = stringutil.forcebytestr(inst)
1237 problems += 1
1237 problems += 1
1238 fm.condwrite(err, 'encodingerror', _(" %s\n"
1238 fm.condwrite(err, 'encodingerror', _(" %s\n"
1239 " (check that your locale is properly set)\n"), err)
1239 " (check that your locale is properly set)\n"), err)
1240
1240
1241 # Python
1241 # Python
1242 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1242 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1243 pycompat.sysexecutable or _("unknown"))
1243 pycompat.sysexecutable or _("unknown"))
1244 fm.write('pythonver', _("checking Python version (%s)\n"),
1244 fm.write('pythonver', _("checking Python version (%s)\n"),
1245 ("%d.%d.%d" % sys.version_info[:3]))
1245 ("%d.%d.%d" % sys.version_info[:3]))
1246 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1246 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1247 os.path.dirname(pycompat.fsencode(os.__file__)))
1247 os.path.dirname(pycompat.fsencode(os.__file__)))
1248
1248
1249 security = set(sslutil.supportedprotocols)
1249 security = set(sslutil.supportedprotocols)
1250 if sslutil.hassni:
1250 if sslutil.hassni:
1251 security.add('sni')
1251 security.add('sni')
1252
1252
1253 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1253 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1254 fm.formatlist(sorted(security), name='protocol',
1254 fm.formatlist(sorted(security), name='protocol',
1255 fmt='%s', sep=','))
1255 fmt='%s', sep=','))
1256
1256
1257 # These are warnings, not errors. So don't increment problem count. This
1257 # These are warnings, not errors. So don't increment problem count. This
1258 # may change in the future.
1258 # may change in the future.
1259 if 'tls1.2' not in security:
1259 if 'tls1.2' not in security:
1260 fm.plain(_(' TLS 1.2 not supported by Python install; '
1260 fm.plain(_(' TLS 1.2 not supported by Python install; '
1261 'network connections lack modern security\n'))
1261 'network connections lack modern security\n'))
1262 if 'sni' not in security:
1262 if 'sni' not in security:
1263 fm.plain(_(' SNI not supported by Python install; may have '
1263 fm.plain(_(' SNI not supported by Python install; may have '
1264 'connectivity issues with some servers\n'))
1264 'connectivity issues with some servers\n'))
1265
1265
1266 # TODO print CA cert info
1266 # TODO print CA cert info
1267
1267
1268 # hg version
1268 # hg version
1269 hgver = util.version()
1269 hgver = util.version()
1270 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1270 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1271 hgver.split('+')[0])
1271 hgver.split('+')[0])
1272 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1272 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1273 '+'.join(hgver.split('+')[1:]))
1273 '+'.join(hgver.split('+')[1:]))
1274
1274
1275 # compiled modules
1275 # compiled modules
1276 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1276 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1277 policy.policy)
1277 policy.policy)
1278 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1278 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1279 os.path.dirname(pycompat.fsencode(__file__)))
1279 os.path.dirname(pycompat.fsencode(__file__)))
1280
1280
1281 rustandc = policy.policy in ('rust+c', 'rust+c-allow')
1281 rustandc = policy.policy in ('rust+c', 'rust+c-allow')
1282 rustext = rustandc # for now, that's the only case
1282 rustext = rustandc # for now, that's the only case
1283 cext = policy.policy in ('c', 'allow') or rustandc
1283 cext = policy.policy in ('c', 'allow') or rustandc
1284 nopure = cext or rustext
1284 nopure = cext or rustext
1285 if nopure:
1285 if nopure:
1286 err = None
1286 err = None
1287 try:
1287 try:
1288 if cext:
1288 if cext:
1289 from .cext import (
1289 from .cext import (
1290 base85,
1290 base85,
1291 bdiff,
1291 bdiff,
1292 mpatch,
1292 mpatch,
1293 osutil,
1293 osutil,
1294 )
1294 )
1295 # quiet pyflakes
1295 # quiet pyflakes
1296 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1296 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1297 if rustext:
1297 if rustext:
1298 from .rustext import (
1298 from .rustext import (
1299 ancestor,
1299 ancestor,
1300 dirstate,
1300 dirstate,
1301 )
1301 )
1302 dir(ancestor), dir(dirstate) # quiet pyflakes
1302 dir(ancestor), dir(dirstate) # quiet pyflakes
1303 except Exception as inst:
1303 except Exception as inst:
1304 err = stringutil.forcebytestr(inst)
1304 err = stringutil.forcebytestr(inst)
1305 problems += 1
1305 problems += 1
1306 fm.condwrite(err, 'extensionserror', " %s\n", err)
1306 fm.condwrite(err, 'extensionserror', " %s\n", err)
1307
1307
1308 compengines = util.compengines._engines.values()
1308 compengines = util.compengines._engines.values()
1309 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1309 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1310 fm.formatlist(sorted(e.name() for e in compengines),
1310 fm.formatlist(sorted(e.name() for e in compengines),
1311 name='compengine', fmt='%s', sep=', '))
1311 name='compengine', fmt='%s', sep=', '))
1312 fm.write('compenginesavail', _('checking available compression engines '
1312 fm.write('compenginesavail', _('checking available compression engines '
1313 '(%s)\n'),
1313 '(%s)\n'),
1314 fm.formatlist(sorted(e.name() for e in compengines
1314 fm.formatlist(sorted(e.name() for e in compengines
1315 if e.available()),
1315 if e.available()),
1316 name='compengine', fmt='%s', sep=', '))
1316 name='compengine', fmt='%s', sep=', '))
1317 wirecompengines = compression.compengines.supportedwireengines(
1317 wirecompengines = compression.compengines.supportedwireengines(
1318 compression.SERVERROLE)
1318 compression.SERVERROLE)
1319 fm.write('compenginesserver', _('checking available compression engines '
1319 fm.write('compenginesserver', _('checking available compression engines '
1320 'for wire protocol (%s)\n'),
1320 'for wire protocol (%s)\n'),
1321 fm.formatlist([e.name() for e in wirecompengines
1321 fm.formatlist([e.name() for e in wirecompengines
1322 if e.wireprotosupport()],
1322 if e.wireprotosupport()],
1323 name='compengine', fmt='%s', sep=', '))
1323 name='compengine', fmt='%s', sep=', '))
1324 re2 = 'missing'
1324 re2 = 'missing'
1325 if util._re2:
1325 if util._re2:
1326 re2 = 'available'
1326 re2 = 'available'
1327 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1327 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1328 fm.data(re2=bool(util._re2))
1328 fm.data(re2=bool(util._re2))
1329
1329
1330 # templates
1330 # templates
1331 p = templater.templatepaths()
1331 p = templater.templatepaths()
1332 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1332 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1333 fm.condwrite(not p, '', _(" no template directories found\n"))
1333 fm.condwrite(not p, '', _(" no template directories found\n"))
1334 if p:
1334 if p:
1335 m = templater.templatepath("map-cmdline.default")
1335 m = templater.templatepath("map-cmdline.default")
1336 if m:
1336 if m:
1337 # template found, check if it is working
1337 # template found, check if it is working
1338 err = None
1338 err = None
1339 try:
1339 try:
1340 templater.templater.frommapfile(m)
1340 templater.templater.frommapfile(m)
1341 except Exception as inst:
1341 except Exception as inst:
1342 err = stringutil.forcebytestr(inst)
1342 err = stringutil.forcebytestr(inst)
1343 p = None
1343 p = None
1344 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1344 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1345 else:
1345 else:
1346 p = None
1346 p = None
1347 fm.condwrite(p, 'defaulttemplate',
1347 fm.condwrite(p, 'defaulttemplate',
1348 _("checking default template (%s)\n"), m)
1348 _("checking default template (%s)\n"), m)
1349 fm.condwrite(not m, 'defaulttemplatenotfound',
1349 fm.condwrite(not m, 'defaulttemplatenotfound',
1350 _(" template '%s' not found\n"), "default")
1350 _(" template '%s' not found\n"), "default")
1351 if not p:
1351 if not p:
1352 problems += 1
1352 problems += 1
1353 fm.condwrite(not p, '',
1353 fm.condwrite(not p, '',
1354 _(" (templates seem to have been installed incorrectly)\n"))
1354 _(" (templates seem to have been installed incorrectly)\n"))
1355
1355
1356 # editor
1356 # editor
1357 editor = ui.geteditor()
1357 editor = ui.geteditor()
1358 editor = util.expandpath(editor)
1358 editor = util.expandpath(editor)
1359 editorbin = procutil.shellsplit(editor)[0]
1359 editorbin = procutil.shellsplit(editor)[0]
1360 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1360 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1361 cmdpath = procutil.findexe(editorbin)
1361 cmdpath = procutil.findexe(editorbin)
1362 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1362 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1363 _(" No commit editor set and can't find %s in PATH\n"
1363 _(" No commit editor set and can't find %s in PATH\n"
1364 " (specify a commit editor in your configuration"
1364 " (specify a commit editor in your configuration"
1365 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1365 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1366 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1366 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1367 _(" Can't find editor '%s' in PATH\n"
1367 _(" Can't find editor '%s' in PATH\n"
1368 " (specify a commit editor in your configuration"
1368 " (specify a commit editor in your configuration"
1369 " file)\n"), not cmdpath and editorbin)
1369 " file)\n"), not cmdpath and editorbin)
1370 if not cmdpath and editor != 'vi':
1370 if not cmdpath and editor != 'vi':
1371 problems += 1
1371 problems += 1
1372
1372
1373 # check username
1373 # check username
1374 username = None
1374 username = None
1375 err = None
1375 err = None
1376 try:
1376 try:
1377 username = ui.username()
1377 username = ui.username()
1378 except error.Abort as e:
1378 except error.Abort as e:
1379 err = stringutil.forcebytestr(e)
1379 err = stringutil.forcebytestr(e)
1380 problems += 1
1380 problems += 1
1381
1381
1382 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1382 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1383 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1383 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1384 " (specify a username in your configuration file)\n"), err)
1384 " (specify a username in your configuration file)\n"), err)
1385
1385
1386 for name, mod in extensions.extensions():
1387 handler = getattr(mod, 'debuginstall', None)
1388 if handler is not None:
1389 problems += handler(ui, fm)
1390
1386 fm.condwrite(not problems, '',
1391 fm.condwrite(not problems, '',
1387 _("no problems detected\n"))
1392 _("no problems detected\n"))
1388 if not problems:
1393 if not problems:
1389 fm.data(problems=problems)
1394 fm.data(problems=problems)
1390 fm.condwrite(problems, 'problems',
1395 fm.condwrite(problems, 'problems',
1391 _("%d problems detected,"
1396 _("%d problems detected,"
1392 " please check your install!\n"), problems)
1397 " please check your install!\n"), problems)
1393 fm.end()
1398 fm.end()
1394
1399
1395 return problems
1400 return problems
1396
1401
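The extensions.extensions() loop added above lets any loaded extension contribute to 'hg debuginstall' by exposing a debuginstall(ui, fm) callable that writes through the formatter and returns the number of problems it found. A hedged sketch of such a hook in a third-party extension (the extension name and the helper binary it checks for are made up):

# myext.py -- illustrative extension-side hook picked up by the loop above
from mercurial.i18n import _
from mercurial.utils import procutil

def debuginstall(ui, fm):
    # write extra 'hg debuginstall' lines through the formatter and return
    # the number of problems found, matching the contract used above
    problems = 0
    tool = procutil.findexe(b'mytool')
    fm.write(b'mytool', _(b'checking mytool (%s)\n'), tool or _(b'not found'))
    if not tool:
        problems += 1
        fm.plain(_(b"  (install mytool or remove myext from your configuration)\n"))
    return problems

Returning an integer keeps the overall problem count, and therefore the final "problems detected" summary, accurate.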
1397 @command('debugknown', [], _('REPO ID...'), norepo=True)
1402 @command('debugknown', [], _('REPO ID...'), norepo=True)
1398 def debugknown(ui, repopath, *ids, **opts):
1403 def debugknown(ui, repopath, *ids, **opts):
1399 """test whether node ids are known to a repo
1404 """test whether node ids are known to a repo
1400
1405
1401 Every ID must be a full-length hex node id string. Returns a list of 0s
1406 Every ID must be a full-length hex node id string. Returns a list of 0s
1402 and 1s indicating unknown/known.
1407 and 1s indicating unknown/known.
1403 """
1408 """
1404 opts = pycompat.byteskwargs(opts)
1409 opts = pycompat.byteskwargs(opts)
1405 repo = hg.peer(ui, opts, repopath)
1410 repo = hg.peer(ui, opts, repopath)
1406 if not repo.capable('known'):
1411 if not repo.capable('known'):
1407 raise error.Abort("known() not supported by target repository")
1412 raise error.Abort("known() not supported by target repository")
1408 flags = repo.known([bin(s) for s in ids])
1413 flags = repo.known([bin(s) for s in ids])
1409 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1414 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1410
1415
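The command prints one character per queried node with no separator; the join above maps each boolean flag to '1' or '0'. For example:

flags = [True, False, True]
"".join([f and "1" or "0" for f in flags])   # -> '101'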
1411 @command('debuglabelcomplete', [], _('LABEL...'))
1416 @command('debuglabelcomplete', [], _('LABEL...'))
1412 def debuglabelcomplete(ui, repo, *args):
1417 def debuglabelcomplete(ui, repo, *args):
1413 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1418 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1414 debugnamecomplete(ui, repo, *args)
1419 debugnamecomplete(ui, repo, *args)
1415
1420
1416 @command('debuglocks',
1421 @command('debuglocks',
1417 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1422 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1418 ('W', 'force-wlock', None,
1423 ('W', 'force-wlock', None,
1419 _('free the working state lock (DANGEROUS)')),
1424 _('free the working state lock (DANGEROUS)')),
1420 ('s', 'set-lock', None, _('set the store lock until stopped')),
1425 ('s', 'set-lock', None, _('set the store lock until stopped')),
1421 ('S', 'set-wlock', None,
1426 ('S', 'set-wlock', None,
1422 _('set the working state lock until stopped'))],
1427 _('set the working state lock until stopped'))],
1423 _('[OPTION]...'))
1428 _('[OPTION]...'))
1424 def debuglocks(ui, repo, **opts):
1429 def debuglocks(ui, repo, **opts):
1425 """show or modify state of locks
1430 """show or modify state of locks
1426
1431
1427 By default, this command will show which locks are held. This
1432 By default, this command will show which locks are held. This
1428 includes the user and process holding the lock, the amount of time
1433 includes the user and process holding the lock, the amount of time
1429 the lock has been held, and the machine name where the process is
1434 the lock has been held, and the machine name where the process is
1430 running if it's not local.
1435 running if it's not local.
1431
1436
1432 Locks protect the integrity of Mercurial's data, so they should be
1437 Locks protect the integrity of Mercurial's data, so they should be
1433 treated with care. System crashes or other interruptions may cause
1438 treated with care. System crashes or other interruptions may cause
1434 locks to not be properly released, though Mercurial will usually
1439 locks to not be properly released, though Mercurial will usually
1435 detect and remove such stale locks automatically.
1440 detect and remove such stale locks automatically.
1436
1441
1437 However, detecting stale locks may not always be possible (for
1442 However, detecting stale locks may not always be possible (for
1438 instance, on a shared filesystem). Removing locks may also be
1443 instance, on a shared filesystem). Removing locks may also be
1439 blocked by filesystem permissions.
1444 blocked by filesystem permissions.
1440
1445
1441 Setting a lock will prevent other commands from changing the data.
1446 Setting a lock will prevent other commands from changing the data.
1442 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1447 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1443 The set locks are removed when the command exits.
1448 The set locks are removed when the command exits.
1444
1449
1445 Returns 0 if no locks are held.
1450 Returns 0 if no locks are held.
1446
1451
1447 """
1452 """
1448
1453
1449 if opts.get(r'force_lock'):
1454 if opts.get(r'force_lock'):
1450 repo.svfs.unlink('lock')
1455 repo.svfs.unlink('lock')
1451 if opts.get(r'force_wlock'):
1456 if opts.get(r'force_wlock'):
1452 repo.vfs.unlink('wlock')
1457 repo.vfs.unlink('wlock')
1453 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1458 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1454 return 0
1459 return 0
1455
1460
1456 locks = []
1461 locks = []
1457 try:
1462 try:
1458 if opts.get(r'set_wlock'):
1463 if opts.get(r'set_wlock'):
1459 try:
1464 try:
1460 locks.append(repo.wlock(False))
1465 locks.append(repo.wlock(False))
1461 except error.LockHeld:
1466 except error.LockHeld:
1462 raise error.Abort(_('wlock is already held'))
1467 raise error.Abort(_('wlock is already held'))
1463 if opts.get(r'set_lock'):
1468 if opts.get(r'set_lock'):
1464 try:
1469 try:
1465 locks.append(repo.lock(False))
1470 locks.append(repo.lock(False))
1466 except error.LockHeld:
1471 except error.LockHeld:
1467 raise error.Abort(_('lock is already held'))
1472 raise error.Abort(_('lock is already held'))
1468 if len(locks):
1473 if len(locks):
1469 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1474 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1470 return 0
1475 return 0
1471 finally:
1476 finally:
1472 release(*locks)
1477 release(*locks)
1473
1478
1474 now = time.time()
1479 now = time.time()
1475 held = 0
1480 held = 0
1476
1481
1477 def report(vfs, name, method):
1482 def report(vfs, name, method):
1478 # this causes stale locks to get reaped for more accurate reporting
1483 # this causes stale locks to get reaped for more accurate reporting
1479 try:
1484 try:
1480 l = method(False)
1485 l = method(False)
1481 except error.LockHeld:
1486 except error.LockHeld:
1482 l = None
1487 l = None
1483
1488
1484 if l:
1489 if l:
1485 l.release()
1490 l.release()
1486 else:
1491 else:
1487 try:
1492 try:
1488 st = vfs.lstat(name)
1493 st = vfs.lstat(name)
1489 age = now - st[stat.ST_MTIME]
1494 age = now - st[stat.ST_MTIME]
1490 user = util.username(st.st_uid)
1495 user = util.username(st.st_uid)
1491 locker = vfs.readlock(name)
1496 locker = vfs.readlock(name)
1492 if ":" in locker:
1497 if ":" in locker:
1493 host, pid = locker.split(':')
1498 host, pid = locker.split(':')
1494 if host == socket.gethostname():
1499 if host == socket.gethostname():
1495 locker = 'user %s, process %s' % (user or b'None', pid)
1500 locker = 'user %s, process %s' % (user or b'None', pid)
1496 else:
1501 else:
1497 locker = ('user %s, process %s, host %s'
1502 locker = ('user %s, process %s, host %s'
1498 % (user or b'None', pid, host))
1503 % (user or b'None', pid, host))
1499 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1504 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1500 return 1
1505 return 1
1501 except OSError as e:
1506 except OSError as e:
1502 if e.errno != errno.ENOENT:
1507 if e.errno != errno.ENOENT:
1503 raise
1508 raise
1504
1509
1505 ui.write(("%-6s free\n") % (name + ":"))
1510 ui.write(("%-6s free\n") % (name + ":"))
1506 return 0
1511 return 0
1507
1512
1508 held += report(repo.svfs, "lock", repo.lock)
1513 held += report(repo.svfs, "lock", repo.lock)
1509 held += report(repo.vfs, "wlock", repo.wlock)
1514 held += report(repo.vfs, "wlock", repo.wlock)
1510
1515
1511 return held
1516 return held
1512
1517
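report() prints one line per lock: either "free" or the holder plus the lock's age derived from the lock file's mtime. A hedged example of the output when only the working-copy lock is held (user, process id and age are made up):

lock:  free
wlock: user alice, process 12345 (7s)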
1513 @command('debugmanifestfulltextcache', [
1518 @command('debugmanifestfulltextcache', [
1514 ('', 'clear', False, _('clear the cache')),
1519 ('', 'clear', False, _('clear the cache')),
1515 ('a', 'add', [], _('add the given manifest nodes to the cache'),
1520 ('a', 'add', [], _('add the given manifest nodes to the cache'),
1516 _('NODE'))
1521 _('NODE'))
1517 ], '')
1522 ], '')
1518 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
1523 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
1519 """show, clear or amend the contents of the manifest fulltext cache"""
1524 """show, clear or amend the contents of the manifest fulltext cache"""
1520
1525
1521 def getcache():
1526 def getcache():
1522 r = repo.manifestlog.getstorage(b'')
1527 r = repo.manifestlog.getstorage(b'')
1523 try:
1528 try:
1524 return r._fulltextcache
1529 return r._fulltextcache
1525 except AttributeError:
1530 except AttributeError:
1526 msg = _("Current revlog implementation doesn't appear to have a "
1531 msg = _("Current revlog implementation doesn't appear to have a "
1527 "manifest fulltext cache\n")
1532 "manifest fulltext cache\n")
1528 raise error.Abort(msg)
1533 raise error.Abort(msg)
1529
1534
1530 if opts.get(r'clear'):
1535 if opts.get(r'clear'):
1531 with repo.wlock():
1536 with repo.wlock():
1532 cache = getcache()
1537 cache = getcache()
1533 cache.clear(clear_persisted_data=True)
1538 cache.clear(clear_persisted_data=True)
1534 return
1539 return
1535
1540
1536 if add:
1541 if add:
1537 with repo.wlock():
1542 with repo.wlock():
1538 m = repo.manifestlog
1543 m = repo.manifestlog
1539 store = m.getstorage(b'')
1544 store = m.getstorage(b'')
1540 for n in add:
1545 for n in add:
1541 try:
1546 try:
1542 manifest = m[store.lookup(n)]
1547 manifest = m[store.lookup(n)]
1543 except error.LookupError as e:
1548 except error.LookupError as e:
1544 raise error.Abort(e, hint="Check your manifest node id")
1549 raise error.Abort(e, hint="Check your manifest node id")
1545 manifest.read() # stores revision in cache too
1550 manifest.read() # stores revision in cache too
1546 return
1551 return
1547
1552
1548 cache = getcache()
1553 cache = getcache()
1549 if not len(cache):
1554 if not len(cache):
1550 ui.write(_('cache empty\n'))
1555 ui.write(_('cache empty\n'))
1551 else:
1556 else:
1552 ui.write(
1557 ui.write(
1553 _('cache contains %d manifest entries, in order of most to '
1558 _('cache contains %d manifest entries, in order of most to '
1554 'least recent:\n') % (len(cache),))
1559 'least recent:\n') % (len(cache),))
1555 totalsize = 0
1560 totalsize = 0
1556 for nodeid in cache:
1561 for nodeid in cache:
1557 # Use cache.peek to not update the LRU order
1562 # Use cache.peek to not update the LRU order
1558 data = cache.peek(nodeid)
1563 data = cache.peek(nodeid)
1559 size = len(data)
1564 size = len(data)
1560 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1565 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1561 ui.write(_('id: %s, size %s\n') % (
1566 ui.write(_('id: %s, size %s\n') % (
1562 hex(nodeid), util.bytecount(size)))
1567 hex(nodeid), util.bytecount(size)))
1563 ondisk = cache._opener.stat('manifestfulltextcache').st_size
1568 ondisk = cache._opener.stat('manifestfulltextcache').st_size
1564 ui.write(
1569 ui.write(
1565 _('total cache data size %s, on-disk %s\n') % (
1570 _('total cache data size %s, on-disk %s\n') % (
1566 util.bytecount(totalsize), util.bytecount(ondisk))
1571 util.bytecount(totalsize), util.bytecount(ondisk))
1567 )
1572 )
1568
1573
1569 @command('debugmergestate', [], '')
1574 @command('debugmergestate', [], '')
1570 def debugmergestate(ui, repo, *args):
1575 def debugmergestate(ui, repo, *args):
1571 """print merge state
1576 """print merge state
1572
1577
1573 Use --verbose to print out information about whether v1 or v2 merge state
1578 Use --verbose to print out information about whether v1 or v2 merge state
1574 was chosen."""
1579 was chosen."""
1575 def _hashornull(h):
1580 def _hashornull(h):
1576 if h == nullhex:
1581 if h == nullhex:
1577 return 'null'
1582 return 'null'
1578 else:
1583 else:
1579 return h
1584 return h
1580
1585
1581 def printrecords(version):
1586 def printrecords(version):
1582 ui.write(('* version %d records\n') % version)
1587 ui.write(('* version %d records\n') % version)
1583 if version == 1:
1588 if version == 1:
1584 records = v1records
1589 records = v1records
1585 else:
1590 else:
1586 records = v2records
1591 records = v2records
1587
1592
1588 for rtype, record in records:
1593 for rtype, record in records:
1589 # pretty print some record types
1594 # pretty print some record types
1590 if rtype == 'L':
1595 if rtype == 'L':
1591 ui.write(('local: %s\n') % record)
1596 ui.write(('local: %s\n') % record)
1592 elif rtype == 'O':
1597 elif rtype == 'O':
1593 ui.write(('other: %s\n') % record)
1598 ui.write(('other: %s\n') % record)
1594 elif rtype == 'm':
1599 elif rtype == 'm':
1595 driver, mdstate = record.split('\0', 1)
1600 driver, mdstate = record.split('\0', 1)
1596 ui.write(('merge driver: %s (state "%s")\n')
1601 ui.write(('merge driver: %s (state "%s")\n')
1597 % (driver, mdstate))
1602 % (driver, mdstate))
1598 elif rtype in 'FDC':
1603 elif rtype in 'FDC':
1599 r = record.split('\0')
1604 r = record.split('\0')
1600 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1605 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1601 if version == 1:
1606 if version == 1:
1602 onode = 'not stored in v1 format'
1607 onode = 'not stored in v1 format'
1603 flags = r[7]
1608 flags = r[7]
1604 else:
1609 else:
1605 onode, flags = r[7:9]
1610 onode, flags = r[7:9]
1606 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1611 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1607 % (f, rtype, state, _hashornull(hash)))
1612 % (f, rtype, state, _hashornull(hash)))
1608 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1613 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1609 ui.write((' ancestor path: %s (node %s)\n')
1614 ui.write((' ancestor path: %s (node %s)\n')
1610 % (afile, _hashornull(anode)))
1615 % (afile, _hashornull(anode)))
1611 ui.write((' other path: %s (node %s)\n')
1616 ui.write((' other path: %s (node %s)\n')
1612 % (ofile, _hashornull(onode)))
1617 % (ofile, _hashornull(onode)))
1613 elif rtype == 'f':
1618 elif rtype == 'f':
1614 filename, rawextras = record.split('\0', 1)
1619 filename, rawextras = record.split('\0', 1)
1615 extras = rawextras.split('\0')
1620 extras = rawextras.split('\0')
1616 i = 0
1621 i = 0
1617 extrastrings = []
1622 extrastrings = []
1618 while i < len(extras):
1623 while i < len(extras):
1619 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1624 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1620 i += 2
1625 i += 2
1621
1626
1622 ui.write(('file extras: %s (%s)\n')
1627 ui.write(('file extras: %s (%s)\n')
1623 % (filename, ', '.join(extrastrings)))
1628 % (filename, ', '.join(extrastrings)))
1624 elif rtype == 'l':
1629 elif rtype == 'l':
1625 labels = record.split('\0', 2)
1630 labels = record.split('\0', 2)
1626 labels = [l for l in labels if len(l) > 0]
1631 labels = [l for l in labels if len(l) > 0]
1627 ui.write(('labels:\n'))
1632 ui.write(('labels:\n'))
1628 ui.write((' local: %s\n' % labels[0]))
1633 ui.write((' local: %s\n' % labels[0]))
1629 ui.write((' other: %s\n' % labels[1]))
1634 ui.write((' other: %s\n' % labels[1]))
1630 if len(labels) > 2:
1635 if len(labels) > 2:
1631 ui.write((' base: %s\n' % labels[2]))
1636 ui.write((' base: %s\n' % labels[2]))
1632 else:
1637 else:
1633 ui.write(('unrecognized entry: %s\t%s\n')
1638 ui.write(('unrecognized entry: %s\t%s\n')
1634 % (rtype, record.replace('\0', '\t')))
1639 % (rtype, record.replace('\0', '\t')))
1635
1640
1636 # Avoid mergestate.read() since it may raise an exception for unsupported
1641 # Avoid mergestate.read() since it may raise an exception for unsupported
1637 # merge state records. We shouldn't be doing this, but this is OK since this
1642 # merge state records. We shouldn't be doing this, but this is OK since this
1638 # command is pretty low-level.
1643 # command is pretty low-level.
1639 ms = mergemod.mergestate(repo)
1644 ms = mergemod.mergestate(repo)
1640
1645
1641 # sort so that reasonable information is on top
1646 # sort so that reasonable information is on top
1642 v1records = ms._readrecordsv1()
1647 v1records = ms._readrecordsv1()
1643 v2records = ms._readrecordsv2()
1648 v2records = ms._readrecordsv2()
1644 order = 'LOml'
1649 order = 'LOml'
1645 def key(r):
1650 def key(r):
1646 idx = order.find(r[0])
1651 idx = order.find(r[0])
1647 if idx == -1:
1652 if idx == -1:
1648 return (1, r[1])
1653 return (1, r[1])
1649 else:
1654 else:
1650 return (0, idx)
1655 return (0, idx)
1651 v1records.sort(key=key)
1656 v1records.sort(key=key)
1652 v2records.sort(key=key)
1657 v2records.sort(key=key)
1653
1658
1654 if not v1records and not v2records:
1659 if not v1records and not v2records:
1655 ui.write(('no merge state found\n'))
1660 ui.write(('no merge state found\n'))
1656 elif not v2records:
1661 elif not v2records:
1657 ui.note(('no version 2 merge state\n'))
1662 ui.note(('no version 2 merge state\n'))
1658 printrecords(1)
1663 printrecords(1)
1659 elif ms._v1v2match(v1records, v2records):
1664 elif ms._v1v2match(v1records, v2records):
1660 ui.note(('v1 and v2 states match: using v2\n'))
1665 ui.note(('v1 and v2 states match: using v2\n'))
1661 printrecords(2)
1666 printrecords(2)
1662 else:
1667 else:
1663 ui.note(('v1 and v2 states mismatch: using v1\n'))
1668 ui.note(('v1 and v2 states mismatch: using v1\n'))
1664 printrecords(1)
1669 printrecords(1)
1665 if ui.verbose:
1670 if ui.verbose:
1666 printrecords(2)
1671 printrecords(2)
1667
1672
1668 @command('debugnamecomplete', [], _('NAME...'))
1673 @command('debugnamecomplete', [], _('NAME...'))
1669 def debugnamecomplete(ui, repo, *args):
1674 def debugnamecomplete(ui, repo, *args):
1670 '''complete "names" - tags, open branch names, bookmark names'''
1675 '''complete "names" - tags, open branch names, bookmark names'''
1671
1676
1672 names = set()
1677 names = set()
1673 # since we previously only listed open branches, we will handle that
1678 # since we previously only listed open branches, we will handle that
1674 # specially (after this for loop)
1679 # specially (after this for loop)
1675 for name, ns in repo.names.iteritems():
1680 for name, ns in repo.names.iteritems():
1676 if name != 'branches':
1681 if name != 'branches':
1677 names.update(ns.listnames(repo))
1682 names.update(ns.listnames(repo))
1678 names.update(tag for (tag, heads, tip, closed)
1683 names.update(tag for (tag, heads, tip, closed)
1679 in repo.branchmap().iterbranches() if not closed)
1684 in repo.branchmap().iterbranches() if not closed)
1680 completions = set()
1685 completions = set()
1681 if not args:
1686 if not args:
1682 args = ['']
1687 args = ['']
1683 for a in args:
1688 for a in args:
1684 completions.update(n for n in names if n.startswith(a))
1689 completions.update(n for n in names if n.startswith(a))
1685 ui.write('\n'.join(sorted(completions)))
1690 ui.write('\n'.join(sorted(completions)))
1686 ui.write('\n')
1691 ui.write('\n')
1687
1692
1688 @command('debugobsolete',
1693 @command('debugobsolete',
1689 [('', 'flags', 0, _('markers flag')),
1694 [('', 'flags', 0, _('markers flag')),
1690 ('', 'record-parents', False,
1695 ('', 'record-parents', False,
1691 _('record parent information for the precursor')),
1696 _('record parent information for the precursor')),
1692 ('r', 'rev', [], _('display markers relevant to REV')),
1697 ('r', 'rev', [], _('display markers relevant to REV')),
1693 ('', 'exclusive', False, _('restrict display to markers only '
1698 ('', 'exclusive', False, _('restrict display to markers only '
1694 'relevant to REV')),
1699 'relevant to REV')),
1695 ('', 'index', False, _('display index of the marker')),
1700 ('', 'index', False, _('display index of the marker')),
1696 ('', 'delete', [], _('delete markers specified by indices')),
1701 ('', 'delete', [], _('delete markers specified by indices')),
1697 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1702 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1698 _('[OBSOLETED [REPLACEMENT ...]]'))
1703 _('[OBSOLETED [REPLACEMENT ...]]'))
1699 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1704 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1700 """create arbitrary obsolete marker
1705 """create arbitrary obsolete marker
1701
1706
1702 With no arguments, displays the list of obsolescence markers."""
1707 With no arguments, displays the list of obsolescence markers."""
1703
1708
1704 opts = pycompat.byteskwargs(opts)
1709 opts = pycompat.byteskwargs(opts)
1705
1710
1706 def parsenodeid(s):
1711 def parsenodeid(s):
1707 try:
1712 try:
1708 # We do not use revsingle/revrange functions here to accept
1713 # We do not use revsingle/revrange functions here to accept
1709 # arbitrary node identifiers, possibly not present in the
1714 # arbitrary node identifiers, possibly not present in the
1710 # local repository.
1715 # local repository.
1711 n = bin(s)
1716 n = bin(s)
1712 if len(n) != len(nullid):
1717 if len(n) != len(nullid):
1713 raise TypeError()
1718 raise TypeError()
1714 return n
1719 return n
1715 except TypeError:
1720 except TypeError:
1716 raise error.Abort('changeset references must be full hexadecimal '
1721 raise error.Abort('changeset references must be full hexadecimal '
1717 'node identifiers')
1722 'node identifiers')
1718
1723
1719 if opts.get('delete'):
1724 if opts.get('delete'):
1720 indices = []
1725 indices = []
1721 for v in opts.get('delete'):
1726 for v in opts.get('delete'):
1722 try:
1727 try:
1723 indices.append(int(v))
1728 indices.append(int(v))
1724 except ValueError:
1729 except ValueError:
1725 raise error.Abort(_('invalid index value: %r') % v,
1730 raise error.Abort(_('invalid index value: %r') % v,
1726 hint=_('use integers for indices'))
1731 hint=_('use integers for indices'))
1727
1732
1728 if repo.currenttransaction():
1733 if repo.currenttransaction():
1729 raise error.Abort(_('cannot delete obsmarkers in the middle '
1734 raise error.Abort(_('cannot delete obsmarkers in the middle '
1730 'of transaction.'))
1735 'of transaction.'))
1731
1736
1732 with repo.lock():
1737 with repo.lock():
1733 n = repair.deleteobsmarkers(repo.obsstore, indices)
1738 n = repair.deleteobsmarkers(repo.obsstore, indices)
1734 ui.write(_('deleted %i obsolescence markers\n') % n)
1739 ui.write(_('deleted %i obsolescence markers\n') % n)
1735
1740
1736 return
1741 return
1737
1742
1738 if precursor is not None:
1743 if precursor is not None:
1739 if opts['rev']:
1744 if opts['rev']:
1740 raise error.Abort('cannot select revision when creating marker')
1745 raise error.Abort('cannot select revision when creating marker')
1741 metadata = {}
1746 metadata = {}
1742 metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
1747 metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
1743 succs = tuple(parsenodeid(succ) for succ in successors)
1748 succs = tuple(parsenodeid(succ) for succ in successors)
1744 l = repo.lock()
1749 l = repo.lock()
1745 try:
1750 try:
1746 tr = repo.transaction('debugobsolete')
1751 tr = repo.transaction('debugobsolete')
1747 try:
1752 try:
1748 date = opts.get('date')
1753 date = opts.get('date')
1749 if date:
1754 if date:
1750 date = dateutil.parsedate(date)
1755 date = dateutil.parsedate(date)
1751 else:
1756 else:
1752 date = None
1757 date = None
1753 prec = parsenodeid(precursor)
1758 prec = parsenodeid(precursor)
1754 parents = None
1759 parents = None
1755 if opts['record_parents']:
1760 if opts['record_parents']:
1756 if prec not in repo.unfiltered():
1761 if prec not in repo.unfiltered():
1757 raise error.Abort('cannot use --record-parents on '
1762 raise error.Abort('cannot use --record-parents on '
1758 'unknown changesets')
1763 'unknown changesets')
1759 parents = repo.unfiltered()[prec].parents()
1764 parents = repo.unfiltered()[prec].parents()
1760 parents = tuple(p.node() for p in parents)
1765 parents = tuple(p.node() for p in parents)
1761 repo.obsstore.create(tr, prec, succs, opts['flags'],
1766 repo.obsstore.create(tr, prec, succs, opts['flags'],
1762 parents=parents, date=date,
1767 parents=parents, date=date,
1763 metadata=metadata, ui=ui)
1768 metadata=metadata, ui=ui)
1764 tr.close()
1769 tr.close()
1765 except ValueError as exc:
1770 except ValueError as exc:
1766 raise error.Abort(_('bad obsmarker input: %s') %
1771 raise error.Abort(_('bad obsmarker input: %s') %
1767 pycompat.bytestr(exc))
1772 pycompat.bytestr(exc))
1768 finally:
1773 finally:
1769 tr.release()
1774 tr.release()
1770 finally:
1775 finally:
1771 l.release()
1776 l.release()
1772 else:
1777 else:
1773 if opts['rev']:
1778 if opts['rev']:
1774 revs = scmutil.revrange(repo, opts['rev'])
1779 revs = scmutil.revrange(repo, opts['rev'])
1775 nodes = [repo[r].node() for r in revs]
1780 nodes = [repo[r].node() for r in revs]
1776 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1781 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1777 exclusive=opts['exclusive']))
1782 exclusive=opts['exclusive']))
1778 markers.sort(key=lambda x: x._data)
1783 markers.sort(key=lambda x: x._data)
1779 else:
1784 else:
1780 markers = obsutil.getmarkers(repo)
1785 markers = obsutil.getmarkers(repo)
1781
1786
1782 markerstoiter = markers
1787 markerstoiter = markers
1783 isrelevant = lambda m: True
1788 isrelevant = lambda m: True
1784 if opts.get('rev') and opts.get('index'):
1789 if opts.get('rev') and opts.get('index'):
1785 markerstoiter = obsutil.getmarkers(repo)
1790 markerstoiter = obsutil.getmarkers(repo)
1786 markerset = set(markers)
1791 markerset = set(markers)
1787 isrelevant = lambda m: m in markerset
1792 isrelevant = lambda m: m in markerset
1788
1793
1789 fm = ui.formatter('debugobsolete', opts)
1794 fm = ui.formatter('debugobsolete', opts)
1790 for i, m in enumerate(markerstoiter):
1795 for i, m in enumerate(markerstoiter):
1791 if not isrelevant(m):
1796 if not isrelevant(m):
1792 # marker can be irrelevant when we're iterating over a set
1797 # marker can be irrelevant when we're iterating over a set
1793 # of markers (markerstoiter) which is bigger than the set
1798 # of markers (markerstoiter) which is bigger than the set
1794 # of markers we want to display (markers)
1799 # of markers we want to display (markers)
1795 # this can happen if both --index and --rev options are
1800 # this can happen if both --index and --rev options are
1796 # provided and thus we need to iterate over all of the markers
1801 # provided and thus we need to iterate over all of the markers
1797 # to get the correct indices, but only display the ones that
1802 # to get the correct indices, but only display the ones that
1798 # are relevant to --rev value
1803 # are relevant to --rev value
1799 continue
1804 continue
1800 fm.startitem()
1805 fm.startitem()
1801 ind = i if opts.get('index') else None
1806 ind = i if opts.get('index') else None
1802 cmdutil.showmarker(fm, m, index=ind)
1807 cmdutil.showmarker(fm, m, index=ind)
1803 fm.end()
1808 fm.end()
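# Illustrative usage of debugobsolete (an editorial sketch, not part of the
# original source). The node hashes are placeholders and must be full
# 40-digit hexadecimal identifiers, as enforced by parsenodeid() above.
#
#   hg debugobsolete                           # list all obsolescence markers
#   hg debugobsolete --index --rev tip         # markers relevant to tip, with indices
#   hg debugobsolete --delete 0 3              # delete markers 0 and 3 by index
#   hg debugobsolete <old-node> <new-node>     # record old-node as rewritten into new-node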
1804
1809
1805 @command('debugp1copies',
1810 @command('debugp1copies',
1806 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1811 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1807 _('[-r REV]'))
1812 _('[-r REV]'))
1808 def debugp1copies(ui, repo, **opts):
1813 def debugp1copies(ui, repo, **opts):
1809 """dump copy information compared to p1"""
1814 """dump copy information compared to p1"""
1810
1815
1811 opts = pycompat.byteskwargs(opts)
1816 opts = pycompat.byteskwargs(opts)
1812 ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
1817 ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
1813 for dst, src in ctx.p1copies().items():
1818 for dst, src in ctx.p1copies().items():
1814 ui.write('%s -> %s\n' % (src, dst))
1819 ui.write('%s -> %s\n' % (src, dst))
1815
1820
1816 @command('debugp2copies',
1821 @command('debugp2copies',
1817 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1822 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1818 _('[-r REV]'))
1823 _('[-r REV]'))
1819 def debugp2copies(ui, repo, **opts):
1824 def debugp2copies(ui, repo, **opts):
1820 """dump copy information compared to p2"""
1825 """dump copy information compared to p2"""
1821
1826
1822 opts = pycompat.byteskwargs(opts)
1827 opts = pycompat.byteskwargs(opts)
1823 ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
1828 ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
1824 for dst, src in ctx.p2copies().items():
1829 for dst, src in ctx.p2copies().items():
1825 ui.write('%s -> %s\n' % (src, dst))
1830 ui.write('%s -> %s\n' % (src, dst))
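# Illustrative usage of the two copy-dumping commands above (an editorial
# sketch; the revision is a placeholder). Each prints one "source -> destination"
# line per copy recorded against the corresponding parent; p2 copies can only
# exist for merge commits.
#
#   hg debugp1copies -r .    # copies relative to the first parent of '.'
#   hg debugp2copies -r .    # copies relative to the second parent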
1826
1831
1827 @command('debugpathcomplete',
1832 @command('debugpathcomplete',
1828 [('f', 'full', None, _('complete an entire path')),
1833 [('f', 'full', None, _('complete an entire path')),
1829 ('n', 'normal', None, _('show only normal files')),
1834 ('n', 'normal', None, _('show only normal files')),
1830 ('a', 'added', None, _('show only added files')),
1835 ('a', 'added', None, _('show only added files')),
1831 ('r', 'removed', None, _('show only removed files'))],
1836 ('r', 'removed', None, _('show only removed files'))],
1832 _('FILESPEC...'))
1837 _('FILESPEC...'))
1833 def debugpathcomplete(ui, repo, *specs, **opts):
1838 def debugpathcomplete(ui, repo, *specs, **opts):
1834 '''complete part or all of a tracked path
1839 '''complete part or all of a tracked path
1835
1840
1836 This command supports shells that offer path name completion. It
1841 This command supports shells that offer path name completion. It
1837 currently completes only files already known to the dirstate.
1842 currently completes only files already known to the dirstate.
1838
1843
1839 Completion extends only to the next path segment unless
1844 Completion extends only to the next path segment unless
1840 --full is specified, in which case entire paths are used.'''
1845 --full is specified, in which case entire paths are used.'''
1841
1846
1842 def complete(path, acceptable):
1847 def complete(path, acceptable):
1843 dirstate = repo.dirstate
1848 dirstate = repo.dirstate
1844 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
1849 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
1845 rootdir = repo.root + pycompat.ossep
1850 rootdir = repo.root + pycompat.ossep
1846 if spec != repo.root and not spec.startswith(rootdir):
1851 if spec != repo.root and not spec.startswith(rootdir):
1847 return [], []
1852 return [], []
1848 if os.path.isdir(spec):
1853 if os.path.isdir(spec):
1849 spec += '/'
1854 spec += '/'
1850 spec = spec[len(rootdir):]
1855 spec = spec[len(rootdir):]
1851 fixpaths = pycompat.ossep != '/'
1856 fixpaths = pycompat.ossep != '/'
1852 if fixpaths:
1857 if fixpaths:
1853 spec = spec.replace(pycompat.ossep, '/')
1858 spec = spec.replace(pycompat.ossep, '/')
1854 speclen = len(spec)
1859 speclen = len(spec)
1855 fullpaths = opts[r'full']
1860 fullpaths = opts[r'full']
1856 files, dirs = set(), set()
1861 files, dirs = set(), set()
1857 adddir, addfile = dirs.add, files.add
1862 adddir, addfile = dirs.add, files.add
1858 for f, st in dirstate.iteritems():
1863 for f, st in dirstate.iteritems():
1859 if f.startswith(spec) and st[0] in acceptable:
1864 if f.startswith(spec) and st[0] in acceptable:
1860 if fixpaths:
1865 if fixpaths:
1861 f = f.replace('/', pycompat.ossep)
1866 f = f.replace('/', pycompat.ossep)
1862 if fullpaths:
1867 if fullpaths:
1863 addfile(f)
1868 addfile(f)
1864 continue
1869 continue
1865 s = f.find(pycompat.ossep, speclen)
1870 s = f.find(pycompat.ossep, speclen)
1866 if s >= 0:
1871 if s >= 0:
1867 adddir(f[:s])
1872 adddir(f[:s])
1868 else:
1873 else:
1869 addfile(f)
1874 addfile(f)
1870 return files, dirs
1875 return files, dirs
1871
1876
1872 acceptable = ''
1877 acceptable = ''
1873 if opts[r'normal']:
1878 if opts[r'normal']:
1874 acceptable += 'nm'
1879 acceptable += 'nm'
1875 if opts[r'added']:
1880 if opts[r'added']:
1876 acceptable += 'a'
1881 acceptable += 'a'
1877 if opts[r'removed']:
1882 if opts[r'removed']:
1878 acceptable += 'r'
1883 acceptable += 'r'
1879 cwd = repo.getcwd()
1884 cwd = repo.getcwd()
1880 if not specs:
1885 if not specs:
1881 specs = ['.']
1886 specs = ['.']
1882
1887
1883 files, dirs = set(), set()
1888 files, dirs = set(), set()
1884 for spec in specs:
1889 for spec in specs:
1885 f, d = complete(spec, acceptable or 'nmar')
1890 f, d = complete(spec, acceptable or 'nmar')
1886 files.update(f)
1891 files.update(f)
1887 dirs.update(d)
1892 dirs.update(d)
1888 files.update(dirs)
1893 files.update(dirs)
1889 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1894 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1890 ui.write('\n')
1895 ui.write('\n')
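# Illustrative usage of debugpathcomplete (an editorial sketch; 'src/' is a
# placeholder path). Completion is driven entirely by the dirstate, so
# untracked files never appear.
#
#   hg debugpathcomplete src/          # next path segments under src/
#   hg debugpathcomplete --full src/   # entire tracked paths under src/
#   hg debugpathcomplete -a            # only files in the 'added' state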
1891
1896
1892 @command('debugpathcopies',
1897 @command('debugpathcopies',
1893 cmdutil.walkopts,
1898 cmdutil.walkopts,
1894 'hg debugpathcopies REV1 REV2 [FILE]',
1899 'hg debugpathcopies REV1 REV2 [FILE]',
1895 inferrepo=True)
1900 inferrepo=True)
1896 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
1901 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
1897 """show copies between two revisions"""
1902 """show copies between two revisions"""
1898 ctx1 = scmutil.revsingle(repo, rev1)
1903 ctx1 = scmutil.revsingle(repo, rev1)
1899 ctx2 = scmutil.revsingle(repo, rev2)
1904 ctx2 = scmutil.revsingle(repo, rev2)
1900 m = scmutil.match(ctx1, pats, opts)
1905 m = scmutil.match(ctx1, pats, opts)
1901 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
1906 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
1902 ui.write('%s -> %s\n' % (src, dst))
1907 ui.write('%s -> %s\n' % (src, dst))
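# Illustrative usage of debugpathcopies (an editorial sketch; the revisions
# and pattern are placeholders). It prints "source -> destination" for every
# copy detected between the two revisions, optionally narrowed by patterns.
#
#   hg debugpathcopies 1.0 tip
#   hg debugpathcopies -I 'src/**' REV1 REV2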
1903
1908
1904 @command('debugpeer', [], _('PATH'), norepo=True)
1909 @command('debugpeer', [], _('PATH'), norepo=True)
1905 def debugpeer(ui, path):
1910 def debugpeer(ui, path):
1906 """establish a connection to a peer repository"""
1911 """establish a connection to a peer repository"""
1907 # Always enable peer request logging. Requires --debug to display
1912 # Always enable peer request logging. Requires --debug to display
1908 # though.
1913 # though.
1909 overrides = {
1914 overrides = {
1910 ('devel', 'debug.peer-request'): True,
1915 ('devel', 'debug.peer-request'): True,
1911 }
1916 }
1912
1917
1913 with ui.configoverride(overrides):
1918 with ui.configoverride(overrides):
1914 peer = hg.peer(ui, {}, path)
1919 peer = hg.peer(ui, {}, path)
1915
1920
1916 local = peer.local() is not None
1921 local = peer.local() is not None
1917 canpush = peer.canpush()
1922 canpush = peer.canpush()
1918
1923
1919 ui.write(_('url: %s\n') % peer.url())
1924 ui.write(_('url: %s\n') % peer.url())
1920 ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
1925 ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
1921 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
1926 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
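# Illustrative usage of debugpeer (an editorial sketch; the URLs are
# placeholders). It prints the url/local/pushable lines shown above; with
# --debug, the peer-request logging enabled via configoverride is also shown.
#
#   hg debugpeer https://example.com/repo
#   hg --debug debugpeer ssh://example.com/repo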
1922
1927
1923 @command('debugpickmergetool',
1928 @command('debugpickmergetool',
1924 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1929 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1925 ('', 'changedelete', None, _('emulate merging change and delete')),
1930 ('', 'changedelete', None, _('emulate merging change and delete')),
1926 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1931 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1927 _('[PATTERN]...'),
1932 _('[PATTERN]...'),
1928 inferrepo=True)
1933 inferrepo=True)
1929 def debugpickmergetool(ui, repo, *pats, **opts):
1934 def debugpickmergetool(ui, repo, *pats, **opts):
1930 """examine which merge tool is chosen for specified file
1935 """examine which merge tool is chosen for specified file
1931
1936
1932 As described in :hg:`help merge-tools`, Mercurial examines
1937 As described in :hg:`help merge-tools`, Mercurial examines
1933 the configurations below in this order to decide which merge tool is
1938 the configurations below in this order to decide which merge tool is
1934 chosen for the specified file.
1939 chosen for the specified file.
1935
1940
1936 1. ``--tool`` option
1941 1. ``--tool`` option
1937 2. ``HGMERGE`` environment variable
1942 2. ``HGMERGE`` environment variable
1938 3. configurations in ``merge-patterns`` section
1943 3. configurations in ``merge-patterns`` section
1939 4. configuration of ``ui.merge``
1944 4. configuration of ``ui.merge``
1940 5. configurations in ``merge-tools`` section
1945 5. configurations in ``merge-tools`` section
1941 6. ``hgmerge`` tool (for historical reasons only)
1946 6. ``hgmerge`` tool (for historical reasons only)
1942 7. default tool for fallback (``:merge`` or ``:prompt``)
1947 7. default tool for fallback (``:merge`` or ``:prompt``)
1943
1948
1944 This command writes out the examination result in the style below::
1949 This command writes out the examination result in the style below::
1945
1950
1946 FILE = MERGETOOL
1951 FILE = MERGETOOL
1947
1952
1948 By default, all files known in the first parent context of the
1953 By default, all files known in the first parent context of the
1949 working directory are examined. Use file patterns and/or -I/-X
1954 working directory are examined. Use file patterns and/or -I/-X
1950 options to limit target files. -r/--rev is also useful to examine
1955 options to limit target files. -r/--rev is also useful to examine
1951 files in another context without actually updating to it.
1956 files in another context without actually updating to it.
1952
1957
1953 With --debug, this command shows warning messages while matching
1958 With --debug, this command shows warning messages while matching
1954 against ``merge-patterns`` and so on, too. It is recommended to
1959 against ``merge-patterns`` and so on, too. It is recommended to
1955 use this option with explicit file patterns and/or -I/-X options,
1960 use this option with explicit file patterns and/or -I/-X options,
1956 because this option increases the amount of output per file according
1961 because this option increases the amount of output per file according
1957 to configurations in hgrc.
1962 to configurations in hgrc.
1958
1963
1959 With -v/--verbose, this command first shows the configurations
1964 With -v/--verbose, this command first shows the configurations
1960 below (only those that are specified).
1965 below (only those that are specified).
1961
1966
1962 - ``--tool`` option
1967 - ``--tool`` option
1963 - ``HGMERGE`` environment variable
1968 - ``HGMERGE`` environment variable
1964 - configuration of ``ui.merge``
1969 - configuration of ``ui.merge``
1965
1970
1966 If a merge tool is chosen before matching against
1971 If a merge tool is chosen before matching against
1967 ``merge-patterns``, this command can't show any helpful
1972 ``merge-patterns``, this command can't show any helpful
1968 information, even with --debug. In such a case, the information
1973 information, even with --debug. In such a case, the information
1969 above is useful for understanding why a merge tool was chosen.
1974 above is useful for understanding why a merge tool was chosen.
1970 """
1975 """
1971 opts = pycompat.byteskwargs(opts)
1976 opts = pycompat.byteskwargs(opts)
1972 overrides = {}
1977 overrides = {}
1973 if opts['tool']:
1978 if opts['tool']:
1974 overrides[('ui', 'forcemerge')] = opts['tool']
1979 overrides[('ui', 'forcemerge')] = opts['tool']
1975 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1980 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1976
1981
1977 with ui.configoverride(overrides, 'debugmergepatterns'):
1982 with ui.configoverride(overrides, 'debugmergepatterns'):
1978 hgmerge = encoding.environ.get("HGMERGE")
1983 hgmerge = encoding.environ.get("HGMERGE")
1979 if hgmerge is not None:
1984 if hgmerge is not None:
1980 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1985 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1981 uimerge = ui.config("ui", "merge")
1986 uimerge = ui.config("ui", "merge")
1982 if uimerge:
1987 if uimerge:
1983 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1988 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1984
1989
1985 ctx = scmutil.revsingle(repo, opts.get('rev'))
1990 ctx = scmutil.revsingle(repo, opts.get('rev'))
1986 m = scmutil.match(ctx, pats, opts)
1991 m = scmutil.match(ctx, pats, opts)
1987 changedelete = opts['changedelete']
1992 changedelete = opts['changedelete']
1988 for path in ctx.walk(m):
1993 for path in ctx.walk(m):
1989 fctx = ctx[path]
1994 fctx = ctx[path]
1990 try:
1995 try:
1991 if not ui.debugflag:
1996 if not ui.debugflag:
1992 ui.pushbuffer(error=True)
1997 ui.pushbuffer(error=True)
1993 tool, toolpath = filemerge._picktool(repo, ui, path,
1998 tool, toolpath = filemerge._picktool(repo, ui, path,
1994 fctx.isbinary(),
1999 fctx.isbinary(),
1995 'l' in fctx.flags(),
2000 'l' in fctx.flags(),
1996 changedelete)
2001 changedelete)
1997 finally:
2002 finally:
1998 if not ui.debugflag:
2003 if not ui.debugflag:
1999 ui.popbuffer()
2004 ui.popbuffer()
2000 ui.write(('%s = %s\n') % (path, tool))
2005 ui.write(('%s = %s\n') % (path, tool))
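# Illustrative usage of debugpickmergetool (an editorial sketch; the file
# patterns are placeholders). Output follows the "FILE = MERGETOOL" form
# documented in the docstring above.
#
#   hg debugpickmergetool                   # all files in the wdir parent
#   hg debugpickmergetool -r 1.0 '**.c'     # C files as of revision 1.0
#   hg debugpickmergetool --tool :merge3 -v foo.txt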
2001
2006
2002 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2007 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2003 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2008 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2004 '''access the pushkey key/value protocol
2009 '''access the pushkey key/value protocol
2005
2010
2006 With two args, list the keys in the given namespace.
2011 With two args, list the keys in the given namespace.
2007
2012
2008 With five args, set a key to new if it currently is set to old.
2013 With five args, set a key to new if it currently is set to old.
2009 Reports success or failure.
2014 Reports success or failure.
2010 '''
2015 '''
2011
2016
2012 target = hg.peer(ui, {}, repopath)
2017 target = hg.peer(ui, {}, repopath)
2013 if keyinfo:
2018 if keyinfo:
2014 key, old, new = keyinfo
2019 key, old, new = keyinfo
2015 with target.commandexecutor() as e:
2020 with target.commandexecutor() as e:
2016 r = e.callcommand('pushkey', {
2021 r = e.callcommand('pushkey', {
2017 'namespace': namespace,
2022 'namespace': namespace,
2018 'key': key,
2023 'key': key,
2019 'old': old,
2024 'old': old,
2020 'new': new,
2025 'new': new,
2021 }).result()
2026 }).result()
2022
2027
2023 ui.status(pycompat.bytestr(r) + '\n')
2028 ui.status(pycompat.bytestr(r) + '\n')
2024 return not r
2029 return not r
2025 else:
2030 else:
2026 for k, v in sorted(target.listkeys(namespace).iteritems()):
2031 for k, v in sorted(target.listkeys(namespace).iteritems()):
2027 ui.write("%s\t%s\n" % (stringutil.escapestr(k),
2032 ui.write("%s\t%s\n" % (stringutil.escapestr(k),
2028 stringutil.escapestr(v)))
2033 stringutil.escapestr(v)))
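# Illustrative usage of debugpushkey (an editorial sketch; the repo path,
# bookmark name, and node are placeholders). Commonly available pushkey
# namespaces include 'namespaces', 'bookmarks', and 'phases'.
#
#   hg debugpushkey . namespaces                  # list available namespaces
#   hg debugpushkey . bookmarks                   # two args: list keys
#   hg debugpushkey . bookmarks mybook '' <node>  # five args: set a key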
2029
2034
2030 @command('debugpvec', [], _('A B'))
2035 @command('debugpvec', [], _('A B'))
2031 def debugpvec(ui, repo, a, b=None):
2036 def debugpvec(ui, repo, a, b=None):
2032 ca = scmutil.revsingle(repo, a)
2037 ca = scmutil.revsingle(repo, a)
2033 cb = scmutil.revsingle(repo, b)
2038 cb = scmutil.revsingle(repo, b)
2034 pa = pvec.ctxpvec(ca)
2039 pa = pvec.ctxpvec(ca)
2035 pb = pvec.ctxpvec(cb)
2040 pb = pvec.ctxpvec(cb)
2036 if pa == pb:
2041 if pa == pb:
2037 rel = "="
2042 rel = "="
2038 elif pa > pb:
2043 elif pa > pb:
2039 rel = ">"
2044 rel = ">"
2040 elif pa < pb:
2045 elif pa < pb:
2041 rel = "<"
2046 rel = "<"
2042 elif pa | pb:
2047 elif pa | pb:
2043 rel = "|"
2048 rel = "|"
2044 ui.write(_("a: %s\n") % pa)
2049 ui.write(_("a: %s\n") % pa)
2045 ui.write(_("b: %s\n") % pb)
2050 ui.write(_("b: %s\n") % pb)
2046 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2051 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2047 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
2052 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
2048 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
2053 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
2049 pa.distance(pb), rel))
2054 pa.distance(pb), rel))
2050
2055
2051 @command('debugrebuilddirstate|debugrebuildstate',
2056 @command('debugrebuilddirstate|debugrebuildstate',
2052 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
2057 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
2053 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
2058 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
2054 'the working copy parent')),
2059 'the working copy parent')),
2055 ],
2060 ],
2056 _('[-r REV]'))
2061 _('[-r REV]'))
2057 def debugrebuilddirstate(ui, repo, rev, **opts):
2062 def debugrebuilddirstate(ui, repo, rev, **opts):
2058 """rebuild the dirstate as it would look like for the given revision
2063 """rebuild the dirstate as it would look like for the given revision
2059
2064
2060 If no revision is specified the first current parent will be used.
2065 If no revision is specified the first current parent will be used.
2061
2066
2062 The dirstate will be set to the files of the given revision.
2067 The dirstate will be set to the files of the given revision.
2063 The actual working directory content or existing dirstate
2068 The actual working directory content or existing dirstate
2064 information such as adds or removes is not considered.
2069 information such as adds or removes is not considered.
2065
2070
2066 ``minimal`` will only rebuild the dirstate status for files that claim to be
2071 ``minimal`` will only rebuild the dirstate status for files that claim to be
2067 tracked but are not in the parent manifest, or that exist in the parent
2072 tracked but are not in the parent manifest, or that exist in the parent
2068 manifest but are not in the dirstate. It will not change adds, removes, or
2073 manifest but are not in the dirstate. It will not change adds, removes, or
2069 modified files that are in the working copy parent.
2074 modified files that are in the working copy parent.
2070
2075
2071 One use of this command is to make the next :hg:`status` invocation
2076 One use of this command is to make the next :hg:`status` invocation
2072 check the actual file content.
2077 check the actual file content.
2073 """
2078 """
2074 ctx = scmutil.revsingle(repo, rev)
2079 ctx = scmutil.revsingle(repo, rev)
2075 with repo.wlock():
2080 with repo.wlock():
2076 dirstate = repo.dirstate
2081 dirstate = repo.dirstate
2077 changedfiles = None
2082 changedfiles = None
2078 # See command doc for what minimal does.
2083 # See command doc for what minimal does.
2079 if opts.get(r'minimal'):
2084 if opts.get(r'minimal'):
2080 manifestfiles = set(ctx.manifest().keys())
2085 manifestfiles = set(ctx.manifest().keys())
2081 dirstatefiles = set(dirstate)
2086 dirstatefiles = set(dirstate)
2082 manifestonly = manifestfiles - dirstatefiles
2087 manifestonly = manifestfiles - dirstatefiles
2083 dsonly = dirstatefiles - manifestfiles
2088 dsonly = dirstatefiles - manifestfiles
2084 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
2089 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
2085 changedfiles = manifestonly | dsnotadded
2090 changedfiles = manifestonly | dsnotadded
2086
2091
2087 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2092 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
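# Illustrative usage of debugrebuilddirstate (an editorial sketch): rebuild
# against the working copy parent, an explicit revision, or only the entries
# that disagree with the parent manifest.
#
#   hg debugrebuilddirstate
#   hg debugrebuilddirstate -r tip
#   hg debugrebuilddirstate --minimal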
2088
2093
2089 @command('debugrebuildfncache', [], '')
2094 @command('debugrebuildfncache', [], '')
2090 def debugrebuildfncache(ui, repo):
2095 def debugrebuildfncache(ui, repo):
2091 """rebuild the fncache file"""
2096 """rebuild the fncache file"""
2092 repair.rebuildfncache(ui, repo)
2097 repair.rebuildfncache(ui, repo)
2093
2098
2094 @command('debugrename',
2099 @command('debugrename',
2095 [('r', 'rev', '', _('revision to debug'), _('REV'))],
2100 [('r', 'rev', '', _('revision to debug'), _('REV'))],
2096 _('[-r REV] [FILE]...'))
2101 _('[-r REV] [FILE]...'))
2097 def debugrename(ui, repo, *pats, **opts):
2102 def debugrename(ui, repo, *pats, **opts):
2098 """dump rename information"""
2103 """dump rename information"""
2099
2104
2100 opts = pycompat.byteskwargs(opts)
2105 opts = pycompat.byteskwargs(opts)
2101 ctx = scmutil.revsingle(repo, opts.get('rev'))
2106 ctx = scmutil.revsingle(repo, opts.get('rev'))
2102 m = scmutil.match(ctx, pats, opts)
2107 m = scmutil.match(ctx, pats, opts)
2103 for abs in ctx.walk(m):
2108 for abs in ctx.walk(m):
2104 fctx = ctx[abs]
2109 fctx = ctx[abs]
2105 o = fctx.filelog().renamed(fctx.filenode())
2110 o = fctx.filelog().renamed(fctx.filenode())
2106 rel = repo.pathto(abs)
2111 rel = repo.pathto(abs)
2107 if o:
2112 if o:
2108 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2113 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2109 else:
2114 else:
2110 ui.write(_("%s not renamed\n") % rel)
2115 ui.write(_("%s not renamed\n") % rel)
2111
2116
2112 @command('debugrevlog', cmdutil.debugrevlogopts +
2117 @command('debugrevlog', cmdutil.debugrevlogopts +
2113 [('d', 'dump', False, _('dump index data'))],
2118 [('d', 'dump', False, _('dump index data'))],
2114 _('-c|-m|FILE'),
2119 _('-c|-m|FILE'),
2115 optionalrepo=True)
2120 optionalrepo=True)
2116 def debugrevlog(ui, repo, file_=None, **opts):
2121 def debugrevlog(ui, repo, file_=None, **opts):
2117 """show data and statistics about a revlog"""
2122 """show data and statistics about a revlog"""
2118 opts = pycompat.byteskwargs(opts)
2123 opts = pycompat.byteskwargs(opts)
2119 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
2124 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
2120
2125
2121 if opts.get("dump"):
2126 if opts.get("dump"):
2122 numrevs = len(r)
2127 numrevs = len(r)
2123 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
2128 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
2124 " rawsize totalsize compression heads chainlen\n"))
2129 " rawsize totalsize compression heads chainlen\n"))
2125 ts = 0
2130 ts = 0
2126 heads = set()
2131 heads = set()
2127
2132
2128 for rev in pycompat.xrange(numrevs):
2133 for rev in pycompat.xrange(numrevs):
2129 dbase = r.deltaparent(rev)
2134 dbase = r.deltaparent(rev)
2130 if dbase == -1:
2135 if dbase == -1:
2131 dbase = rev
2136 dbase = rev
2132 cbase = r.chainbase(rev)
2137 cbase = r.chainbase(rev)
2133 clen = r.chainlen(rev)
2138 clen = r.chainlen(rev)
2134 p1, p2 = r.parentrevs(rev)
2139 p1, p2 = r.parentrevs(rev)
2135 rs = r.rawsize(rev)
2140 rs = r.rawsize(rev)
2136 ts = ts + rs
2141 ts = ts + rs
2137 heads -= set(r.parentrevs(rev))
2142 heads -= set(r.parentrevs(rev))
2138 heads.add(rev)
2143 heads.add(rev)
2139 try:
2144 try:
2140 compression = ts / r.end(rev)
2145 compression = ts / r.end(rev)
2141 except ZeroDivisionError:
2146 except ZeroDivisionError:
2142 compression = 0
2147 compression = 0
2143 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2148 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2144 "%11d %5d %8d\n" %
2149 "%11d %5d %8d\n" %
2145 (rev, p1, p2, r.start(rev), r.end(rev),
2150 (rev, p1, p2, r.start(rev), r.end(rev),
2146 r.start(dbase), r.start(cbase),
2151 r.start(dbase), r.start(cbase),
2147 r.start(p1), r.start(p2),
2152 r.start(p1), r.start(p2),
2148 rs, ts, compression, len(heads), clen))
2153 rs, ts, compression, len(heads), clen))
2149 return 0
2154 return 0
2150
2155
2151 v = r.version
2156 v = r.version
2152 format = v & 0xFFFF
2157 format = v & 0xFFFF
2153 flags = []
2158 flags = []
2154 gdelta = False
2159 gdelta = False
2155 if v & revlog.FLAG_INLINE_DATA:
2160 if v & revlog.FLAG_INLINE_DATA:
2156 flags.append('inline')
2161 flags.append('inline')
2157 if v & revlog.FLAG_GENERALDELTA:
2162 if v & revlog.FLAG_GENERALDELTA:
2158 gdelta = True
2163 gdelta = True
2159 flags.append('generaldelta')
2164 flags.append('generaldelta')
2160 if not flags:
2165 if not flags:
2161 flags = ['(none)']
2166 flags = ['(none)']
2162
2167
2163 ### tracks merge vs single parent
2168 ### tracks merge vs single parent
2164 nummerges = 0
2169 nummerges = 0
2165
2170
2166 ### tracks ways the "delta" are build
2171 ### tracks ways the "delta" are build
2167 # nodelta
2172 # nodelta
2168 numempty = 0
2173 numempty = 0
2169 numemptytext = 0
2174 numemptytext = 0
2170 numemptydelta = 0
2175 numemptydelta = 0
2171 # full file content
2176 # full file content
2172 numfull = 0
2177 numfull = 0
2173 # intermediate snapshot against a prior snapshot
2178 # intermediate snapshot against a prior snapshot
2174 numsemi = 0
2179 numsemi = 0
2175 # snapshot count per depth
2180 # snapshot count per depth
2176 numsnapdepth = collections.defaultdict(lambda: 0)
2181 numsnapdepth = collections.defaultdict(lambda: 0)
2177 # delta against previous revision
2182 # delta against previous revision
2178 numprev = 0
2183 numprev = 0
2179 # delta against first or second parent (not prev)
2184 # delta against first or second parent (not prev)
2180 nump1 = 0
2185 nump1 = 0
2181 nump2 = 0
2186 nump2 = 0
2182 # delta against neither prev nor parents
2187 # delta against neither prev nor parents
2183 numother = 0
2188 numother = 0
2184 # delta against prev that are also first or second parent
2189 # delta against prev that are also first or second parent
2185 # (details of `numprev`)
2190 # (details of `numprev`)
2186 nump1prev = 0
2191 nump1prev = 0
2187 nump2prev = 0
2192 nump2prev = 0
2188
2193
2189 # data about the delta chain of each rev
2194 # data about the delta chain of each rev
2190 chainlengths = []
2195 chainlengths = []
2191 chainbases = []
2196 chainbases = []
2192 chainspans = []
2197 chainspans = []
2193
2198
2194 # data about each revision
2199 # data about each revision
2195 datasize = [None, 0, 0]
2200 datasize = [None, 0, 0]
2196 fullsize = [None, 0, 0]
2201 fullsize = [None, 0, 0]
2197 semisize = [None, 0, 0]
2202 semisize = [None, 0, 0]
2198 # snapshot count per depth
2203 # snapshot count per depth
2199 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2204 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2200 deltasize = [None, 0, 0]
2205 deltasize = [None, 0, 0]
2201 chunktypecounts = {}
2206 chunktypecounts = {}
2202 chunktypesizes = {}
2207 chunktypesizes = {}
2203
2208
2204 def addsize(size, l):
2209 def addsize(size, l):
2205 if l[0] is None or size < l[0]:
2210 if l[0] is None or size < l[0]:
2206 l[0] = size
2211 l[0] = size
2207 if size > l[1]:
2212 if size > l[1]:
2208 l[1] = size
2213 l[1] = size
2209 l[2] += size
2214 l[2] += size
2210
2215
2211 numrevs = len(r)
2216 numrevs = len(r)
2212 for rev in pycompat.xrange(numrevs):
2217 for rev in pycompat.xrange(numrevs):
2213 p1, p2 = r.parentrevs(rev)
2218 p1, p2 = r.parentrevs(rev)
2214 delta = r.deltaparent(rev)
2219 delta = r.deltaparent(rev)
2215 if format > 0:
2220 if format > 0:
2216 addsize(r.rawsize(rev), datasize)
2221 addsize(r.rawsize(rev), datasize)
2217 if p2 != nullrev:
2222 if p2 != nullrev:
2218 nummerges += 1
2223 nummerges += 1
2219 size = r.length(rev)
2224 size = r.length(rev)
2220 if delta == nullrev:
2225 if delta == nullrev:
2221 chainlengths.append(0)
2226 chainlengths.append(0)
2222 chainbases.append(r.start(rev))
2227 chainbases.append(r.start(rev))
2223 chainspans.append(size)
2228 chainspans.append(size)
2224 if size == 0:
2229 if size == 0:
2225 numempty += 1
2230 numempty += 1
2226 numemptytext += 1
2231 numemptytext += 1
2227 else:
2232 else:
2228 numfull += 1
2233 numfull += 1
2229 numsnapdepth[0] += 1
2234 numsnapdepth[0] += 1
2230 addsize(size, fullsize)
2235 addsize(size, fullsize)
2231 addsize(size, snapsizedepth[0])
2236 addsize(size, snapsizedepth[0])
2232 else:
2237 else:
2233 chainlengths.append(chainlengths[delta] + 1)
2238 chainlengths.append(chainlengths[delta] + 1)
2234 baseaddr = chainbases[delta]
2239 baseaddr = chainbases[delta]
2235 revaddr = r.start(rev)
2240 revaddr = r.start(rev)
2236 chainbases.append(baseaddr)
2241 chainbases.append(baseaddr)
2237 chainspans.append((revaddr - baseaddr) + size)
2242 chainspans.append((revaddr - baseaddr) + size)
2238 if size == 0:
2243 if size == 0:
2239 numempty += 1
2244 numempty += 1
2240 numemptydelta += 1
2245 numemptydelta += 1
2241 elif r.issnapshot(rev):
2246 elif r.issnapshot(rev):
2242 addsize(size, semisize)
2247 addsize(size, semisize)
2243 numsemi += 1
2248 numsemi += 1
2244 depth = r.snapshotdepth(rev)
2249 depth = r.snapshotdepth(rev)
2245 numsnapdepth[depth] += 1
2250 numsnapdepth[depth] += 1
2246 addsize(size, snapsizedepth[depth])
2251 addsize(size, snapsizedepth[depth])
2247 else:
2252 else:
2248 addsize(size, deltasize)
2253 addsize(size, deltasize)
2249 if delta == rev - 1:
2254 if delta == rev - 1:
2250 numprev += 1
2255 numprev += 1
2251 if delta == p1:
2256 if delta == p1:
2252 nump1prev += 1
2257 nump1prev += 1
2253 elif delta == p2:
2258 elif delta == p2:
2254 nump2prev += 1
2259 nump2prev += 1
2255 elif delta == p1:
2260 elif delta == p1:
2256 nump1 += 1
2261 nump1 += 1
2257 elif delta == p2:
2262 elif delta == p2:
2258 nump2 += 1
2263 nump2 += 1
2259 elif delta != nullrev:
2264 elif delta != nullrev:
2260 numother += 1
2265 numother += 1
2261
2266
2262 # Obtain data on the raw chunks in the revlog.
2267 # Obtain data on the raw chunks in the revlog.
2263 if util.safehasattr(r, '_getsegmentforrevs'):
2268 if util.safehasattr(r, '_getsegmentforrevs'):
2264 segment = r._getsegmentforrevs(rev, rev)[1]
2269 segment = r._getsegmentforrevs(rev, rev)[1]
2265 else:
2270 else:
2266 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2271 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2267 if segment:
2272 if segment:
2268 chunktype = bytes(segment[0:1])
2273 chunktype = bytes(segment[0:1])
2269 else:
2274 else:
2270 chunktype = 'empty'
2275 chunktype = 'empty'
2271
2276
2272 if chunktype not in chunktypecounts:
2277 if chunktype not in chunktypecounts:
2273 chunktypecounts[chunktype] = 0
2278 chunktypecounts[chunktype] = 0
2274 chunktypesizes[chunktype] = 0
2279 chunktypesizes[chunktype] = 0
2275
2280
2276 chunktypecounts[chunktype] += 1
2281 chunktypecounts[chunktype] += 1
2277 chunktypesizes[chunktype] += size
2282 chunktypesizes[chunktype] += size
2278
2283
2279 # Adjust size min value for empty cases
2284 # Adjust size min value for empty cases
2280 for size in (datasize, fullsize, semisize, deltasize):
2285 for size in (datasize, fullsize, semisize, deltasize):
2281 if size[0] is None:
2286 if size[0] is None:
2282 size[0] = 0
2287 size[0] = 0
2283
2288
2284 numdeltas = numrevs - numfull - numempty - numsemi
2289 numdeltas = numrevs - numfull - numempty - numsemi
2285 numoprev = numprev - nump1prev - nump2prev
2290 numoprev = numprev - nump1prev - nump2prev
2286 totalrawsize = datasize[2]
2291 totalrawsize = datasize[2]
2287 datasize[2] /= numrevs
2292 datasize[2] /= numrevs
2288 fulltotal = fullsize[2]
2293 fulltotal = fullsize[2]
2289 if numfull == 0:
2294 if numfull == 0:
2290 fullsize[2] = 0
2295 fullsize[2] = 0
2291 else:
2296 else:
2292 fullsize[2] /= numfull
2297 fullsize[2] /= numfull
2293 semitotal = semisize[2]
2298 semitotal = semisize[2]
2294 snaptotal = {}
2299 snaptotal = {}
2295 if numsemi > 0:
2300 if numsemi > 0:
2296 semisize[2] /= numsemi
2301 semisize[2] /= numsemi
2297 for depth in snapsizedepth:
2302 for depth in snapsizedepth:
2298 snaptotal[depth] = snapsizedepth[depth][2]
2303 snaptotal[depth] = snapsizedepth[depth][2]
2299 snapsizedepth[depth][2] /= numsnapdepth[depth]
2304 snapsizedepth[depth][2] /= numsnapdepth[depth]
2300
2305
2301 deltatotal = deltasize[2]
2306 deltatotal = deltasize[2]
2302 if numdeltas > 0:
2307 if numdeltas > 0:
2303 deltasize[2] /= numdeltas
2308 deltasize[2] /= numdeltas
2304 totalsize = fulltotal + semitotal + deltatotal
2309 totalsize = fulltotal + semitotal + deltatotal
2305 avgchainlen = sum(chainlengths) / numrevs
2310 avgchainlen = sum(chainlengths) / numrevs
2306 maxchainlen = max(chainlengths)
2311 maxchainlen = max(chainlengths)
2307 maxchainspan = max(chainspans)
2312 maxchainspan = max(chainspans)
2308 compratio = 1
2313 compratio = 1
2309 if totalsize:
2314 if totalsize:
2310 compratio = totalrawsize / totalsize
2315 compratio = totalrawsize / totalsize
2311
2316
2312 basedfmtstr = '%%%dd\n'
2317 basedfmtstr = '%%%dd\n'
2313 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2318 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2314
2319
2315 def dfmtstr(max):
2320 def dfmtstr(max):
2316 return basedfmtstr % len(str(max))
2321 return basedfmtstr % len(str(max))
2317 def pcfmtstr(max, padding=0):
2322 def pcfmtstr(max, padding=0):
2318 return basepcfmtstr % (len(str(max)), ' ' * padding)
2323 return basepcfmtstr % (len(str(max)), ' ' * padding)
2319
2324
2320 def pcfmt(value, total):
2325 def pcfmt(value, total):
2321 if total:
2326 if total:
2322 return (value, 100 * float(value) / total)
2327 return (value, 100 * float(value) / total)
2323 else:
2328 else:
2324 return value, 100.0
2329 return value, 100.0
2325
2330
2326 ui.write(('format : %d\n') % format)
2331 ui.write(('format : %d\n') % format)
2327 ui.write(('flags : %s\n') % ', '.join(flags))
2332 ui.write(('flags : %s\n') % ', '.join(flags))
2328
2333
2329 ui.write('\n')
2334 ui.write('\n')
2330 fmt = pcfmtstr(totalsize)
2335 fmt = pcfmtstr(totalsize)
2331 fmt2 = dfmtstr(totalsize)
2336 fmt2 = dfmtstr(totalsize)
2332 ui.write(('revisions : ') + fmt2 % numrevs)
2337 ui.write(('revisions : ') + fmt2 % numrevs)
2333 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2338 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2334 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2339 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2335 ui.write(('revisions : ') + fmt2 % numrevs)
2340 ui.write(('revisions : ') + fmt2 % numrevs)
2336 ui.write((' empty : ') + fmt % pcfmt(numempty, numrevs))
2341 ui.write((' empty : ') + fmt % pcfmt(numempty, numrevs))
2337 ui.write((' text : ')
2342 ui.write((' text : ')
2338 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta))
2343 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta))
2339 ui.write((' delta : ')
2344 ui.write((' delta : ')
2340 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta))
2345 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta))
2341 ui.write((' snapshot : ') + fmt % pcfmt(numfull + numsemi, numrevs))
2346 ui.write((' snapshot : ') + fmt % pcfmt(numfull + numsemi, numrevs))
2342 for depth in sorted(numsnapdepth):
2347 for depth in sorted(numsnapdepth):
2343 ui.write((' lvl-%-3d : ' % depth)
2348 ui.write((' lvl-%-3d : ' % depth)
2344 + fmt % pcfmt(numsnapdepth[depth], numrevs))
2349 + fmt % pcfmt(numsnapdepth[depth], numrevs))
2345 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2350 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2346 ui.write(('revision size : ') + fmt2 % totalsize)
2351 ui.write(('revision size : ') + fmt2 % totalsize)
2347 ui.write((' snapshot : ')
2352 ui.write((' snapshot : ')
2348 + fmt % pcfmt(fulltotal + semitotal, totalsize))
2353 + fmt % pcfmt(fulltotal + semitotal, totalsize))
2349 for depth in sorted(numsnapdepth):
2354 for depth in sorted(numsnapdepth):
2350 ui.write((' lvl-%-3d : ' % depth)
2355 ui.write((' lvl-%-3d : ' % depth)
2351 + fmt % pcfmt(snaptotal[depth], totalsize))
2356 + fmt % pcfmt(snaptotal[depth], totalsize))
2352 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2357 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2353
2358
2354 def fmtchunktype(chunktype):
2359 def fmtchunktype(chunktype):
2355 if chunktype == 'empty':
2360 if chunktype == 'empty':
2356 return ' %s : ' % chunktype
2361 return ' %s : ' % chunktype
2357 elif chunktype in pycompat.bytestr(string.ascii_letters):
2362 elif chunktype in pycompat.bytestr(string.ascii_letters):
2358 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2363 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2359 else:
2364 else:
2360 return ' 0x%s : ' % hex(chunktype)
2365 return ' 0x%s : ' % hex(chunktype)
2361
2366
2362 ui.write('\n')
2367 ui.write('\n')
2363 ui.write(('chunks : ') + fmt2 % numrevs)
2368 ui.write(('chunks : ') + fmt2 % numrevs)
2364 for chunktype in sorted(chunktypecounts):
2369 for chunktype in sorted(chunktypecounts):
2365 ui.write(fmtchunktype(chunktype))
2370 ui.write(fmtchunktype(chunktype))
2366 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2371 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2367 ui.write(('chunks size : ') + fmt2 % totalsize)
2372 ui.write(('chunks size : ') + fmt2 % totalsize)
2368 for chunktype in sorted(chunktypecounts):
2373 for chunktype in sorted(chunktypecounts):
2369 ui.write(fmtchunktype(chunktype))
2374 ui.write(fmtchunktype(chunktype))
2370 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2375 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2371
2376
2372 ui.write('\n')
2377 ui.write('\n')
2373 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2378 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2374 ui.write(('avg chain length : ') + fmt % avgchainlen)
2379 ui.write(('avg chain length : ') + fmt % avgchainlen)
2375 ui.write(('max chain length : ') + fmt % maxchainlen)
2380 ui.write(('max chain length : ') + fmt % maxchainlen)
2376 ui.write(('max chain reach : ') + fmt % maxchainspan)
2381 ui.write(('max chain reach : ') + fmt % maxchainspan)
2377 ui.write(('compression ratio : ') + fmt % compratio)
2382 ui.write(('compression ratio : ') + fmt % compratio)
2378
2383
2379 if format > 0:
2384 if format > 0:
2380 ui.write('\n')
2385 ui.write('\n')
2381 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2386 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2382 % tuple(datasize))
2387 % tuple(datasize))
2383 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2388 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2384 % tuple(fullsize))
2389 % tuple(fullsize))
2385 ui.write(('inter-snapshot size (min/max/avg) : %d / %d / %d\n')
2390 ui.write(('inter-snapshot size (min/max/avg) : %d / %d / %d\n')
2386 % tuple(semisize))
2391 % tuple(semisize))
2387 for depth in sorted(snapsizedepth):
2392 for depth in sorted(snapsizedepth):
2388 if depth == 0:
2393 if depth == 0:
2389 continue
2394 continue
2390 ui.write((' level-%-3d (min/max/avg) : %d / %d / %d\n')
2395 ui.write((' level-%-3d (min/max/avg) : %d / %d / %d\n')
2391 % ((depth,) + tuple(snapsizedepth[depth])))
2396 % ((depth,) + tuple(snapsizedepth[depth])))
2392 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2397 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2393 % tuple(deltasize))
2398 % tuple(deltasize))
2394
2399
2395 if numdeltas > 0:
2400 if numdeltas > 0:
2396 ui.write('\n')
2401 ui.write('\n')
2397 fmt = pcfmtstr(numdeltas)
2402 fmt = pcfmtstr(numdeltas)
2398 fmt2 = pcfmtstr(numdeltas, 4)
2403 fmt2 = pcfmtstr(numdeltas, 4)
2399 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2404 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2400 if numprev > 0:
2405 if numprev > 0:
2401 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2406 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2402 numprev))
2407 numprev))
2403 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2408 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2404 numprev))
2409 numprev))
2405 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2410 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2406 numprev))
2411 numprev))
2407 if gdelta:
2412 if gdelta:
2408 ui.write(('deltas against p1 : ')
2413 ui.write(('deltas against p1 : ')
2409 + fmt % pcfmt(nump1, numdeltas))
2414 + fmt % pcfmt(nump1, numdeltas))
2410 ui.write(('deltas against p2 : ')
2415 ui.write(('deltas against p2 : ')
2411 + fmt % pcfmt(nump2, numdeltas))
2416 + fmt % pcfmt(nump2, numdeltas))
2412 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2417 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2413 numdeltas))
2418 numdeltas))
2414
2419
2415 @command('debugrevlogindex', cmdutil.debugrevlogopts +
2420 @command('debugrevlogindex', cmdutil.debugrevlogopts +
2416 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
2421 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
2417 _('[-f FORMAT] -c|-m|FILE'),
2422 _('[-f FORMAT] -c|-m|FILE'),
2418 optionalrepo=True)
2423 optionalrepo=True)
2419 def debugrevlogindex(ui, repo, file_=None, **opts):
2424 def debugrevlogindex(ui, repo, file_=None, **opts):
2420 """dump the contents of a revlog index"""
2425 """dump the contents of a revlog index"""
2421 opts = pycompat.byteskwargs(opts)
2426 opts = pycompat.byteskwargs(opts)
2422 r = cmdutil.openrevlog(repo, 'debugrevlogindex', file_, opts)
2427 r = cmdutil.openrevlog(repo, 'debugrevlogindex', file_, opts)
2423 format = opts.get('format', 0)
2428 format = opts.get('format', 0)
2424 if format not in (0, 1):
2429 if format not in (0, 1):
2425 raise error.Abort(_("unknown format %d") % format)
2430 raise error.Abort(_("unknown format %d") % format)
2426
2431
2427 if ui.debugflag:
2432 if ui.debugflag:
2428 shortfn = hex
2433 shortfn = hex
2429 else:
2434 else:
2430 shortfn = short
2435 shortfn = short
2431
2436
2432 # There might not be anything in r, so have a sane default
2437 # There might not be anything in r, so have a sane default
2433 idlen = 12
2438 idlen = 12
2434 for i in r:
2439 for i in r:
2435 idlen = len(shortfn(r.node(i)))
2440 idlen = len(shortfn(r.node(i)))
2436 break
2441 break
2437
2442
2438 if format == 0:
2443 if format == 0:
2439 if ui.verbose:
2444 if ui.verbose:
2440 ui.write((" rev offset length linkrev"
2445 ui.write((" rev offset length linkrev"
2441 " %s %s p2\n") % ("nodeid".ljust(idlen),
2446 " %s %s p2\n") % ("nodeid".ljust(idlen),
2442 "p1".ljust(idlen)))
2447 "p1".ljust(idlen)))
2443 else:
2448 else:
2444 ui.write((" rev linkrev %s %s p2\n") % (
2449 ui.write((" rev linkrev %s %s p2\n") % (
2445 "nodeid".ljust(idlen), "p1".ljust(idlen)))
2450 "nodeid".ljust(idlen), "p1".ljust(idlen)))
2446 elif format == 1:
2451 elif format == 1:
2447 if ui.verbose:
2452 if ui.verbose:
2448 ui.write((" rev flag offset length size link p1"
2453 ui.write((" rev flag offset length size link p1"
2449 " p2 %s\n") % "nodeid".rjust(idlen))
2454 " p2 %s\n") % "nodeid".rjust(idlen))
2450 else:
2455 else:
2451 ui.write((" rev flag size link p1 p2 %s\n") %
2456 ui.write((" rev flag size link p1 p2 %s\n") %
2452 "nodeid".rjust(idlen))
2457 "nodeid".rjust(idlen))
2453
2458
2454 for i in r:
2459 for i in r:
2455 node = r.node(i)
2460 node = r.node(i)
2456 if format == 0:
2461 if format == 0:
2457 try:
2462 try:
2458 pp = r.parents(node)
2463 pp = r.parents(node)
2459 except Exception:
2464 except Exception:
2460 pp = [nullid, nullid]
2465 pp = [nullid, nullid]
2461 if ui.verbose:
2466 if ui.verbose:
2462 ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
2467 ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
2463 i, r.start(i), r.length(i), r.linkrev(i),
2468 i, r.start(i), r.length(i), r.linkrev(i),
2464 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
2469 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
2465 else:
2470 else:
2466 ui.write("% 6d % 7d %s %s %s\n" % (
2471 ui.write("% 6d % 7d %s %s %s\n" % (
2467 i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
2472 i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
2468 shortfn(pp[1])))
2473 shortfn(pp[1])))
2469 elif format == 1:
2474 elif format == 1:
2470 pr = r.parentrevs(i)
2475 pr = r.parentrevs(i)
2471 if ui.verbose:
2476 if ui.verbose:
2472 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
2477 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
2473 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
2478 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
2474 r.linkrev(i), pr[0], pr[1], shortfn(node)))
2479 r.linkrev(i), pr[0], pr[1], shortfn(node)))
2475 else:
2480 else:
2476 ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
2481 ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
2477 i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
2482 i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
2478 shortfn(node)))
2483 shortfn(node)))
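# Illustrative usage of debugrevlogindex (an editorial sketch; the file path
# is a placeholder): list index entries for the changelog, the manifest, or a
# file, in format 0 (the default) or format 1.
#
#   hg debugrevlogindex -c
#   hg debugrevlogindex -f 1 -m
#   hg debugrevlogindex path/to/file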
2479
2484
2480 @command('debugrevspec',
2485 @command('debugrevspec',
2481 [('', 'optimize', None,
2486 [('', 'optimize', None,
2482 _('print parsed tree after optimizing (DEPRECATED)')),
2487 _('print parsed tree after optimizing (DEPRECATED)')),
2483 ('', 'show-revs', True, _('print list of result revisions (default)')),
2488 ('', 'show-revs', True, _('print list of result revisions (default)')),
2484 ('s', 'show-set', None, _('print internal representation of result set')),
2489 ('s', 'show-set', None, _('print internal representation of result set')),
2485 ('p', 'show-stage', [],
2490 ('p', 'show-stage', [],
2486 _('print parsed tree at the given stage'), _('NAME')),
2491 _('print parsed tree at the given stage'), _('NAME')),
2487 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2492 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2488 ('', 'verify-optimized', False, _('verify optimized result')),
2493 ('', 'verify-optimized', False, _('verify optimized result')),
2489 ],
2494 ],
2490 ('REVSPEC'))
2495 ('REVSPEC'))
2491 def debugrevspec(ui, repo, expr, **opts):
2496 def debugrevspec(ui, repo, expr, **opts):
2492 """parse and apply a revision specification
2497 """parse and apply a revision specification
2493
2498
2494 Use -p/--show-stage option to print the parsed tree at the given stages.
2499 Use -p/--show-stage option to print the parsed tree at the given stages.
2495 Use -p all to print tree at every stage.
2500 Use -p all to print tree at every stage.
2496
2501
2497 Use --no-show-revs option with -s or -p to print only the set
2502 Use --no-show-revs option with -s or -p to print only the set
2498 representation or the parsed tree respectively.
2503 representation or the parsed tree respectively.
2499
2504
2500 Use --verify-optimized to compare the optimized result with the unoptimized
2505 Use --verify-optimized to compare the optimized result with the unoptimized
2501 one. Returns 1 if the optimized result differs.
2506 one. Returns 1 if the optimized result differs.
2502 """
2507 """
2503 opts = pycompat.byteskwargs(opts)
2508 opts = pycompat.byteskwargs(opts)
2504 aliases = ui.configitems('revsetalias')
2509 aliases = ui.configitems('revsetalias')
2505 stages = [
2510 stages = [
2506 ('parsed', lambda tree: tree),
2511 ('parsed', lambda tree: tree),
2507 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2512 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2508 ui.warn)),
2513 ui.warn)),
        ('concatenated', revsetlang.foldconcat),
        ('analyzed', revsetlang.analyze),
        ('optimized', revsetlang.optimize),
    ]
    if opts['no_optimized']:
        stages = stages[:-1]
    if opts['verify_optimized'] and opts['no_optimized']:
        raise error.Abort(_('cannot use --verify-optimized with '
                            '--no-optimized'))
    stagenames = set(n for n, f in stages)

    showalways = set()
    showchanged = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
        showchanged.update(['expanded', 'concatenated'])
    if opts['optimize']:
        showalways.add('optimized')
    if opts['show_stage'] and opts['optimize']:
        raise error.Abort(_('cannot use --optimize with --show-stage'))
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(revsetlang.prettyformat(tree), "\n")
            printedtree = tree

    if opts['verify_optimized']:
        arevs = revset.makematcher(treebystage['analyzed'])(repo)
        brevs = revset.makematcher(treebystage['optimized'])(repo)
        if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
            ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
            ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        ui.write(('--- analyzed\n'), label='diff.file_a')
        ui.write(('+++ optimized\n'), label='diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in (r'delete', r'replace'):
                for c in arevs[alo:ahi]:
                    ui.write('-%d\n' % c, label='diff.deleted')
            if tag in (r'insert', r'replace'):
                for c in brevs[blo:bhi]:
                    ui.write('+%d\n' % c, label='diff.inserted')
            if tag == r'equal':
                for c in arevs[alo:ahi]:
                    ui.write(' %d\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
        ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
    if not opts['show_revs']:
        return
    for c in revs:
        ui.write("%d\n" % c)

@command('debugserve', [
    ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
    ('', 'logiofd', '', _('file descriptor to log server I/O to')),
    ('', 'logiofile', '', _('file to log server I/O to')),
    ], '')
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
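
    For example, one might serve the current repository over this process's
    stdio and log all server I/O to a file (illustrative invocation; the log
    path is arbitrary)::

      $ hg debugserve --sshstdio --logiofile /tmp/hg-serve-io.log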
2594 """
2599 """
2595 opts = pycompat.byteskwargs(opts)
2600 opts = pycompat.byteskwargs(opts)
2596
2601
2597 if not opts['sshstdio']:
2602 if not opts['sshstdio']:
2598 raise error.Abort(_('only --sshstdio is currently supported'))
2603 raise error.Abort(_('only --sshstdio is currently supported'))
2599
2604
2600 logfh = None
2605 logfh = None
2601
2606
2602 if opts['logiofd'] and opts['logiofile']:
2607 if opts['logiofd'] and opts['logiofile']:
2603 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2608 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2604
2609
2605 if opts['logiofd']:
2610 if opts['logiofd']:
2606 # Line buffered because output is line based.
2611 # Line buffered because output is line based.
2607 try:
2612 try:
2608 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2613 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2609 except OSError as e:
2614 except OSError as e:
2610 if e.errno != errno.ESPIPE:
2615 if e.errno != errno.ESPIPE:
2611 raise
2616 raise
2612 # can't seek a pipe, so `ab` mode fails on py3
2617 # can't seek a pipe, so `ab` mode fails on py3
2613 logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
2618 logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
2614 elif opts['logiofile']:
2619 elif opts['logiofile']:
2615 logfh = open(opts['logiofile'], 'ab', 1)
2620 logfh = open(opts['logiofile'], 'ab', 1)
2616
2621
2617 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2622 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2618 s.serve_forever()
2623 s.serve_forever()
2619
2624
@command('debugsetparents', [], _('REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """

    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, 'null').node()

    with repo.wlock():
        repo.setparents(node1, node2)

@command('debugssl', [], '[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    '''test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
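
    For example, to check the chain for a specific server (illustrative
    URL)::

      $ hg debugssl https://example.com/repo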
    '''
    if not pycompat.iswindows:
        raise error.Abort(_('certificate chain building is only possible on '
                            'Windows'))

    if not source:
        if not repo:
            raise error.Abort(_("there is no Mercurial repository here, and no "
                                "server specified"))
        source = "default"

    source, branches = hg.parseurl(ui.expandpath(source))
    url = util.url(source)

    defaultport = {'https': 443, 'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_("malformed port number in URL"))
    else:
        raise error.Abort(_("only https and ssh connections are supported"))

    from . import win32

    s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
                        cert_reqs=ssl.CERT_NONE, ca_certs=None)

    try:
        s.connect(addr)
        cert = s.getpeercert(True)

        ui.status(_('checking the certificate chain for %s\n') % url.host)

        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_('certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_('failed.\n'))
            else:
                ui.status(_('done.\n'))
        else:
            ui.status(_('full certificate chain is available\n'))
    finally:
        s.close()

@command('debugsub',
    [('r', 'rev', '',
      _('revision to check'), _('REV'))],
    _('[-r REV] [REV]'))
def debugsub(ui, repo, rev=None):
    ctx = scmutil.revsingle(repo, rev, None)
    for k, v in sorted(ctx.substate.items()):
        ui.write(('path %s\n') % k)
        ui.write((' source %s\n') % v[0])
        ui.write((' revision %s\n') % v[1])

@command('debugsuccessorssets',
    [('', 'closest', False, _('return closest successors sets only'))],
    _('[REV]'))
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only, unless closest
    successors sets are requested (see the --closest flag).

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors is called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
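
    For instance, one way to inspect the successors of every obsolete
    changeset is (illustrative invocation)::

      $ hg debugsuccessorssets --closest 'obsolete()'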
2744 """
2749 """
2745 # passed to successorssets caching computation from one call to another
2750 # passed to successorssets caching computation from one call to another
2746 cache = {}
2751 cache = {}
2747 ctx2str = bytes
2752 ctx2str = bytes
2748 node2str = short
2753 node2str = short
2749 for rev in scmutil.revrange(repo, revs):
2754 for rev in scmutil.revrange(repo, revs):
2750 ctx = repo[rev]
2755 ctx = repo[rev]
2751 ui.write('%s\n'% ctx2str(ctx))
2756 ui.write('%s\n'% ctx2str(ctx))
2752 for succsset in obsutil.successorssets(repo, ctx.node(),
2757 for succsset in obsutil.successorssets(repo, ctx.node(),
2753 closest=opts[r'closest'],
2758 closest=opts[r'closest'],
2754 cache=cache):
2759 cache=cache):
2755 if succsset:
2760 if succsset:
2756 ui.write(' ')
2761 ui.write(' ')
2757 ui.write(node2str(succsset[0]))
2762 ui.write(node2str(succsset[0]))
2758 for node in succsset[1:]:
2763 for node in succsset[1:]:
2759 ui.write(' ')
2764 ui.write(' ')
2760 ui.write(node2str(node))
2765 ui.write(node2str(node))
2761 ui.write('\n')
2766 ui.write('\n')
2762
2767
@command('debugtemplate',
    [('r', 'rev', [], _('apply template on changesets'), _('REV')),
     ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
    _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
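
    For example, a log template applied to the working directory parent, and
    a generic template using a keyword defined with -D (illustrative
    invocations)::

      $ hg debugtemplate -r . '{rev}:{node|short} {desc|firstline}\\n'
      $ hg debugtemplate -D greeting=hello '{greeting}\\n'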
2776 """
2781 """
2777 revs = None
2782 revs = None
2778 if opts[r'rev']:
2783 if opts[r'rev']:
2779 if repo is None:
2784 if repo is None:
2780 raise error.RepoError(_('there is no Mercurial repository here '
2785 raise error.RepoError(_('there is no Mercurial repository here '
2781 '(.hg not found)'))
2786 '(.hg not found)'))
2782 revs = scmutil.revrange(repo, opts[r'rev'])
2787 revs = scmutil.revrange(repo, opts[r'rev'])
2783
2788
2784 props = {}
2789 props = {}
2785 for d in opts[r'define']:
2790 for d in opts[r'define']:
2786 try:
2791 try:
2787 k, v = (e.strip() for e in d.split('=', 1))
2792 k, v = (e.strip() for e in d.split('=', 1))
2788 if not k or k == 'ui':
2793 if not k or k == 'ui':
2789 raise ValueError
2794 raise ValueError
2790 props[k] = v
2795 props[k] = v
2791 except ValueError:
2796 except ValueError:
2792 raise error.Abort(_('malformed keyword definition: %s') % d)
2797 raise error.Abort(_('malformed keyword definition: %s') % d)
2793
2798
2794 if ui.verbose:
2799 if ui.verbose:
2795 aliases = ui.configitems('templatealias')
2800 aliases = ui.configitems('templatealias')
2796 tree = templater.parse(tmpl)
2801 tree = templater.parse(tmpl)
2797 ui.note(templater.prettyformat(tree), '\n')
2802 ui.note(templater.prettyformat(tree), '\n')
2798 newtree = templater.expandaliases(tree, aliases)
2803 newtree = templater.expandaliases(tree, aliases)
2799 if newtree != tree:
2804 if newtree != tree:
2800 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2805 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2801
2806
2802 if revs is None:
2807 if revs is None:
2803 tres = formatter.templateresources(ui, repo)
2808 tres = formatter.templateresources(ui, repo)
2804 t = formatter.maketemplater(ui, tmpl, resources=tres)
2809 t = formatter.maketemplater(ui, tmpl, resources=tres)
2805 if ui.verbose:
2810 if ui.verbose:
2806 kwds, funcs = t.symbolsuseddefault()
2811 kwds, funcs = t.symbolsuseddefault()
2807 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2812 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2808 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2813 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2809 ui.write(t.renderdefault(props))
2814 ui.write(t.renderdefault(props))
2810 else:
2815 else:
2811 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2816 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2812 if ui.verbose:
2817 if ui.verbose:
2813 kwds, funcs = displayer.t.symbolsuseddefault()
2818 kwds, funcs = displayer.t.symbolsuseddefault()
2814 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2819 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2815 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2820 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2816 for r in revs:
2821 for r in revs:
2817 displayer.show(repo[r], **pycompat.strkwargs(props))
2822 displayer.show(repo[r], **pycompat.strkwargs(props))
2818 displayer.close()
2823 displayer.close()
2819
2824
@command('debuguigetpass', [
    ('p', 'prompt', '', _('prompt text'), _('TEXT')),
], _('[-p TEXT]'), norepo=True)
def debuguigetpass(ui, prompt=''):
    """show prompt to type password"""
    r = ui.getpass(prompt)
    ui.write(('response: %s\n') % r)

@command('debuguiprompt', [
    ('p', 'prompt', '', _('prompt text'), _('TEXT')),
], _('[-p TEXT]'), norepo=True)
def debuguiprompt(ui, prompt=''):
    """show plain prompt"""
    r = ui.prompt(prompt)
    ui.write(('response: %s\n') % r)

@command('debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    with repo.wlock(), repo.lock():
        repo.updatecaches(full=True)

@command('debugupgraderepo', [
    ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
    ('', 'run', False, _('performs an upgrade')),
    ('', 'backup', True, _('keep the old repository content around')),
])
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines,
    this should complete almost instantaneously and the chances of a consumer
    being unable to access the repository should be low.
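
    For example, a read-only evaluation of the repository followed by an
    actual upgrade (illustrative invocations)::

      $ hg debugupgraderepo
      $ hg debugupgraderepo --run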
2865 """
2870 """
2866 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize,
2871 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize,
2867 backup=backup)
2872 backup=backup)
2868
2873
@command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
         inferrepo=True)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
    items = list(repo[None].walk(m))
    if not items:
        return
    f = lambda fn: fn
    if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
        f = lambda fn: util.normpath(fn)
    fmt = 'f %%-%ds %%-%ds %%s' % (
        max([len(abs) for abs in items]),
        max([len(repo.pathto(abs)) for abs in items]))
    for abs in items:
        line = fmt % (abs, f(repo.pathto(abs)), m.exact(abs) and 'exact' or '')
        ui.write("%s\n" % line.rstrip())

@command('debugwhyunstable', [], _('REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
        dnodes = ''
        if entry.get('divergentnodes'):
            dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
                              for ctx in entry['divergentnodes']) + ' '
        ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
                                    entry['reason'], entry['node']))

@command('debugwireargs',
    [('', 'three', '', 'three'),
     ('', 'four', '', 'four'),
     ('', 'five', '', 'five'),
    ] + cmdutil.remoteopts,
    _('REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    for opt in cmdutil.remoteopts:
        del opts[opt[1]]
    args = {}
    for k, v in opts.iteritems():
        if v:
            args[k] = v
    args = pycompat.strkwargs(args)
    # run twice to check that we don't mess up the stream for the next command
    res1 = repo.debugwireargs(*vals, **args)
    res2 = repo.debugwireargs(*vals, **args)
    ui.write("%s\n" % res1)
    if res1 != res2:
        ui.warn("%s\n" % res2)

def _parsewirelangblocks(fh):
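    """Parse the ``debugwireproto`` block mini language read from ``fh``.

    Yields ``(action, blocklines)`` tuples, where ``action`` is the
    unindented line that opens a block and ``blocklines`` holds its indented
    payload lines (a line indented deeper than its predecessor is folded
    into the previous entry).
    """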
    activeaction = None
    blocklines = []
    lastindent = 0

    for line in fh:
        line = line.rstrip()
        if not line:
            continue

        if line.startswith(b'#'):
            continue

        if not line.startswith(b' '):
            # New block. Flush previous one.
            if activeaction:
                yield activeaction, blocklines

            activeaction = line
            blocklines = []
            lastindent = 0
            continue

        # Else we start with an indent.

        if not activeaction:
            raise error.Abort(_('indented line outside of block'))

        indent = len(line) - len(line.lstrip())

        # If this line is indented more than the last line, concatenate it.
        if indent > lastindent and blocklines:
            blocklines[-1] += line.lstrip()
        else:
            blocklines.append(line)
            lastindent = indent

    # Flush last block.
    if activeaction:
        yield activeaction, blocklines

@command('debugwireproto',
    [
        ('', 'localssh', False, _('start an SSH server for this repo')),
        ('', 'peer', '', _('construct a specific version of the peer')),
        ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
        ('', 'nologhandshake', False,
         _('do not log I/O related to the peer handshake')),
    ] + cmdutil.remoteopts,
    _('[PATH]'),
    optionalrepo=True)
def debugwireproto(ui, repo, path=None, **opts):
    """send wire protocol commands to a server

    This command can be used to issue wire protocol commands to remote
    peers and to debug the raw data being exchanged.

    ``--localssh`` will start an SSH server against the current repository
    and connect to that. By default, the connection will perform a handshake
    and establish an appropriate peer instance.

    ``--peer`` can be used to bypass the handshake protocol and construct a
    peer instance using the specified class type. Valid values are ``raw``,
    ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
    raw data payloads and don't support higher-level command actions.

    ``--noreadstderr`` can be used to disable automatic reading from stderr
    of the peer (for SSH connections only). Disabling automatic reading of
    stderr is useful for making output more deterministic.

    Commands are issued via a mini language which is specified via stdin.
    The language consists of individual actions to perform. An action is
    defined by a block. A block is defined as a line with no leading
    space followed by 0 or more lines with leading space. Blocks are
    effectively a high-level command with additional metadata.

    Lines beginning with ``#`` are ignored.

    The following sections denote available actions.

    raw
    ---

    Send raw data to the server.

    The block payload contains the raw data to send as one atomic send
    operation. The data may not actually be delivered in a single system
    call: it depends on the abilities of the transport being used.

    Each line in the block is de-indented and concatenated. Then, that
    value is evaluated as a Python b'' literal. This allows the use of
    backslash escaping, etc.

    raw+
    ----

    Behaves like ``raw`` except flushes output afterwards.

    command <X>
    -----------

    Send a request to run a named command, whose name follows the ``command``
    string.

    Arguments to the command are defined as lines in this block. The format of
    each line is ``<key> <value>``. e.g.::

        command listkeys
            namespace bookmarks

    If the value begins with ``eval:``, it will be interpreted as a Python
    literal expression. Otherwise values are interpreted as Python b'' literals.
    This allows sending complex types and encoding special byte sequences via
    backslash escaping.

    The following arguments have special meaning:

    ``PUSHFILE``
        When defined, the *push* mechanism of the peer will be used instead
        of the static request-response mechanism and the content of the
        file specified in the value of this argument will be sent as the
        command payload.

        This can be used to submit a local bundle file to the remote.

    batchbegin
    ----------

    Instruct the peer to begin a batched send.

    All ``command`` blocks are queued for execution until the next
    ``batchsubmit`` block.

    batchsubmit
    -----------

    Submit previously queued ``command`` blocks as a batch request.

    This action MUST be paired with a ``batchbegin`` action.
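
    For example, two commands batched into a single request could be written
    as (illustrative commands and arguments)::

        batchbegin
        command heads
        command listkeys
            namespace bookmarks
        batchsubmit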

    httprequest <method> <path>
    ---------------------------

    (HTTP peer only)

    Send an HTTP request to the peer.

    The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.

    Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
    headers to add to the request. e.g. ``Accept: foo``.

    The following arguments are special:

    ``BODYFILE``
        The content of the file defined as the value to this argument will be
        transferred verbatim as the HTTP request body.

    ``frame <type> <flags> <payload>``
        Send a unified protocol frame as part of the request body.

        All frames will be collected and sent as the body to the HTTP
        request.
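
    For example, a plain capabilities request with an extra header might be
    written as (illustrative header value)::

        httprequest GET ?cmd=capabilities
            user-agent: test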

    close
    -----

    Close the connection to the server.

    flush
    -----

    Flush data written to the server.

    readavailable
    -------------

    Close the write end of the connection and read all available data from
    the server.

    If the connection to the server encompasses multiple pipes, we poll both
    pipes and read available data.

    readline
    --------

    Read a line of output from the server. If there are multiple output
    pipes, reads only the main pipe.

    ereadline
    ---------

    Like ``readline``, but read from the stderr pipe, if available.

    read <X>
    --------

    ``read()`` N bytes from the server's main output pipe.

    eread <X>
    ---------

    ``read()`` N bytes from the server's stderr pipe, if available.

    Specifying Unified Frame-Based Protocol Frames
    ----------------------------------------------

    It is possible to emit *Unified Frame-Based Protocol* frames by using
    special syntax.

    A frame is composed of a type, flags, and a payload. These can be parsed
    from a string of the form::

        <request-id> <stream-id> <stream-flags> <type> <flags> <payload>

    ``request-id`` and ``stream-id`` are integers defining the request and
    stream identifiers.

    ``type`` can be an integer value for the frame type or the string name
    of the type. The strings are defined in ``wireprotoframing.py``. e.g.
    ``command-name``.

    ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
    components. Each component (and there can be just one) can be an integer
    or a flag name for stream flags or frame flags, respectively. Values are
    resolved to integers and then bitwise OR'd together.

    ``payload`` represents the raw frame payload. If it begins with
    ``cbor:``, the following string is evaluated as Python code and the
    resulting object is fed into a CBOR encoder. Otherwise it is interpreted
    as a Python byte string literal.
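
    For instance, a single frame carrying a CBOR-encoded command request
    could be written as (illustrative identifiers and payload)::

        1 1 stream-begin command-request new cbor:{b'name': b'heads'}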
3156 """
3161 """
3157 opts = pycompat.byteskwargs(opts)
3162 opts = pycompat.byteskwargs(opts)
3158
3163
3159 if opts['localssh'] and not repo:
3164 if opts['localssh'] and not repo:
3160 raise error.Abort(_('--localssh requires a repository'))
3165 raise error.Abort(_('--localssh requires a repository'))
3161
3166
3162 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
3167 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
3163 raise error.Abort(_('invalid value for --peer'),
3168 raise error.Abort(_('invalid value for --peer'),
3164 hint=_('valid values are "raw", "ssh1", and "ssh2"'))
3169 hint=_('valid values are "raw", "ssh1", and "ssh2"'))
3165
3170
3166 if path and opts['localssh']:
3171 if path and opts['localssh']:
3167 raise error.Abort(_('cannot specify --localssh with an explicit '
3172 raise error.Abort(_('cannot specify --localssh with an explicit '
3168 'path'))
3173 'path'))
3169
3174
3170 if ui.interactive():
3175 if ui.interactive():
3171 ui.write(_('(waiting for commands on stdin)\n'))
3176 ui.write(_('(waiting for commands on stdin)\n'))
3172
3177
3173 blocks = list(_parsewirelangblocks(ui.fin))
3178 blocks = list(_parsewirelangblocks(ui.fin))
3174
3179
3175 proc = None
3180 proc = None
3176 stdin = None
3181 stdin = None
3177 stdout = None
3182 stdout = None
3178 stderr = None
3183 stderr = None
3179 opener = None
3184 opener = None
3180
3185
3181 if opts['localssh']:
3186 if opts['localssh']:
3182 # We start the SSH server in its own process so there is process
3187 # We start the SSH server in its own process so there is process
3183 # separation. This prevents a whole class of potential bugs around
3188 # separation. This prevents a whole class of potential bugs around
3184 # shared state from interfering with server operation.
3189 # shared state from interfering with server operation.
3185 args = procutil.hgcmd() + [
3190 args = procutil.hgcmd() + [
3186 '-R', repo.root,
3191 '-R', repo.root,
3187 'debugserve', '--sshstdio',
3192 'debugserve', '--sshstdio',
3188 ]
3193 ]
3189 proc = subprocess.Popen(pycompat.rapply(procutil.tonativestr, args),
3194 proc = subprocess.Popen(pycompat.rapply(procutil.tonativestr, args),
3190 stdin=subprocess.PIPE,
3195 stdin=subprocess.PIPE,
3191 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
3196 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
3192 bufsize=0)
3197 bufsize=0)
3193
3198
3194 stdin = proc.stdin
3199 stdin = proc.stdin
3195 stdout = proc.stdout
3200 stdout = proc.stdout
3196 stderr = proc.stderr
3201 stderr = proc.stderr
3197
3202
3198 # We turn the pipes into observers so we can log I/O.
3203 # We turn the pipes into observers so we can log I/O.
3199 if ui.verbose or opts['peer'] == 'raw':
3204 if ui.verbose or opts['peer'] == 'raw':
3200 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
3205 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
3201 logdata=True)
3206 logdata=True)
3202 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
3207 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
3203 logdata=True)
3208 logdata=True)
3204 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
3209 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
3205 logdata=True)
3210 logdata=True)
3206
3211
3207 # --localssh also implies the peer connection settings.
3212 # --localssh also implies the peer connection settings.
3208
3213
3209 url = 'ssh://localserver'
3214 url = 'ssh://localserver'
3210 autoreadstderr = not opts['noreadstderr']
3215 autoreadstderr = not opts['noreadstderr']
3211
3216
3212 if opts['peer'] == 'ssh1':
3217 if opts['peer'] == 'ssh1':
3213 ui.write(_('creating ssh peer for wire protocol version 1\n'))
3218 ui.write(_('creating ssh peer for wire protocol version 1\n'))
3214 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
3219 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
3215 None, autoreadstderr=autoreadstderr)
3220 None, autoreadstderr=autoreadstderr)
3216 elif opts['peer'] == 'ssh2':
3221 elif opts['peer'] == 'ssh2':
3217 ui.write(_('creating ssh peer for wire protocol version 2\n'))
3222 ui.write(_('creating ssh peer for wire protocol version 2\n'))
3218 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
3223 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
3219 None, autoreadstderr=autoreadstderr)
3224 None, autoreadstderr=autoreadstderr)
3220 elif opts['peer'] == 'raw':
3225 elif opts['peer'] == 'raw':
3221 ui.write(_('using raw connection to peer\n'))
3226 ui.write(_('using raw connection to peer\n'))
3222 peer = None
3227 peer = None
3223 else:
3228 else:
3224 ui.write(_('creating ssh peer from handshake results\n'))
3229 ui.write(_('creating ssh peer from handshake results\n'))
3225 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
3230 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
3226 autoreadstderr=autoreadstderr)
3231 autoreadstderr=autoreadstderr)
3227
3232
3228 elif path:
3233 elif path:
3229 # We bypass hg.peer() so we can proxy the sockets.
3234 # We bypass hg.peer() so we can proxy the sockets.
3230 # TODO consider not doing this because we skip
3235 # TODO consider not doing this because we skip
3231 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
3236 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
3232 u = util.url(path)
3237 u = util.url(path)
3233 if u.scheme != 'http':
3238 if u.scheme != 'http':
3234 raise error.Abort(_('only http:// paths are currently supported'))
3239 raise error.Abort(_('only http:// paths are currently supported'))
3235
3240
3236 url, authinfo = u.authinfo()
3241 url, authinfo = u.authinfo()
3237 openerargs = {
3242 openerargs = {
3238 r'useragent': b'Mercurial debugwireproto',
3243 r'useragent': b'Mercurial debugwireproto',
3239 }
3244 }
3240
3245
3241 # Turn pipes/sockets into observers so we can log I/O.
3246 # Turn pipes/sockets into observers so we can log I/O.
3242 if ui.verbose:
3247 if ui.verbose:
3243 openerargs.update({
3248 openerargs.update({
3244 r'loggingfh': ui,
3249 r'loggingfh': ui,
3245 r'loggingname': b's',
3250 r'loggingname': b's',
3246 r'loggingopts': {
3251 r'loggingopts': {
3247 r'logdata': True,
3252 r'logdata': True,
3248 r'logdataapis': False,
3253 r'logdataapis': False,
3249 },
3254 },
3250 })
3255 })
3251
3256
3252 if ui.debugflag:
3257 if ui.debugflag:
3253 openerargs[r'loggingopts'][r'logdataapis'] = True
3258 openerargs[r'loggingopts'][r'logdataapis'] = True
3254
3259
3255 # Don't send default headers when in raw mode. This allows us to
3260 # Don't send default headers when in raw mode. This allows us to
3256 # bypass most of the behavior of our URL handling code so we can
3261 # bypass most of the behavior of our URL handling code so we can
3257 # have near complete control over what's sent on the wire.
3262 # have near complete control over what's sent on the wire.
3258 if opts['peer'] == 'raw':
3263 if opts['peer'] == 'raw':
3259 openerargs[r'sendaccept'] = False
3264 openerargs[r'sendaccept'] = False
3260
3265
3261 opener = urlmod.opener(ui, authinfo, **openerargs)
3266 opener = urlmod.opener(ui, authinfo, **openerargs)
3262
3267
3263 if opts['peer'] == 'http2':
3268 if opts['peer'] == 'http2':
3264 ui.write(_('creating http peer for wire protocol version 2\n'))
3269 ui.write(_('creating http peer for wire protocol version 2\n'))
3265 # We go through makepeer() because we need an API descriptor for
3270 # We go through makepeer() because we need an API descriptor for
3266 # the peer instance to be useful.
3271 # the peer instance to be useful.
3267 with ui.configoverride({
3272 with ui.configoverride({
3268 ('experimental', 'httppeer.advertise-v2'): True}):
3273 ('experimental', 'httppeer.advertise-v2'): True}):
3269 if opts['nologhandshake']:
3274 if opts['nologhandshake']:
3270 ui.pushbuffer()
3275 ui.pushbuffer()
3271
3276
3272 peer = httppeer.makepeer(ui, path, opener=opener)
3277 peer = httppeer.makepeer(ui, path, opener=opener)
3273
3278
3274 if opts['nologhandshake']:
3279 if opts['nologhandshake']:
3275 ui.popbuffer()
3280 ui.popbuffer()
3276
3281
3277 if not isinstance(peer, httppeer.httpv2peer):
3282 if not isinstance(peer, httppeer.httpv2peer):
3278 raise error.Abort(_('could not instantiate HTTP peer for '
3283 raise error.Abort(_('could not instantiate HTTP peer for '
3279 'wire protocol version 2'),
3284 'wire protocol version 2'),
3280 hint=_('the server may not have the feature '
3285 hint=_('the server may not have the feature '
3281 'enabled or is not allowing this '
3286 'enabled or is not allowing this '
3282 'client version'))
3287 'client version'))
3283
3288
3284 elif opts['peer'] == 'raw':
3289 elif opts['peer'] == 'raw':
3285 ui.write(_('using raw connection to peer\n'))
3290 ui.write(_('using raw connection to peer\n'))
3286 peer = None
3291 peer = None
3287 elif opts['peer']:
3292 elif opts['peer']:
3288 raise error.Abort(_('--peer %s not supported with HTTP peers') %
3293 raise error.Abort(_('--peer %s not supported with HTTP peers') %
3289 opts['peer'])
3294 opts['peer'])
3290 else:
3295 else:
3291 peer = httppeer.makepeer(ui, path, opener=opener)
3296 peer = httppeer.makepeer(ui, path, opener=opener)
3292
3297
3293 # We /could/ populate stdin/stdout with sock.makefile()...
3298 # We /could/ populate stdin/stdout with sock.makefile()...
3294 else:
3299 else:
3295 raise error.Abort(_('unsupported connection configuration'))
3300 raise error.Abort(_('unsupported connection configuration'))
3296
3301
3297 batchedcommands = None
3302 batchedcommands = None
3298
3303
3299 # Now perform actions based on the parsed wire language instructions.
3304 # Now perform actions based on the parsed wire language instructions.
3300 for action, lines in blocks:
3305 for action, lines in blocks:
3301 if action in ('raw', 'raw+'):
3306 if action in ('raw', 'raw+'):
3302 if not stdin:
3307 if not stdin:
3303 raise error.Abort(_('cannot call raw/raw+ on this peer'))
3308 raise error.Abort(_('cannot call raw/raw+ on this peer'))
3304
3309
3305 # Concatenate the data together.
3310 # Concatenate the data together.
3306 data = ''.join(l.lstrip() for l in lines)
3311 data = ''.join(l.lstrip() for l in lines)
3307 data = stringutil.unescapestr(data)
3312 data = stringutil.unescapestr(data)
3308 stdin.write(data)
3313 stdin.write(data)
3309
3314
3310 if action == 'raw+':
3315 if action == 'raw+':
3311 stdin.flush()
3316 stdin.flush()
3312 elif action == 'flush':
3317 elif action == 'flush':
3313 if not stdin:
3318 if not stdin:
3314 raise error.Abort(_('cannot call flush on this peer'))
3319 raise error.Abort(_('cannot call flush on this peer'))
3315 stdin.flush()
3320 stdin.flush()
3316 elif action.startswith('command'):
3321 elif action.startswith('command'):
3317 if not peer:
3322 if not peer:
3318 raise error.Abort(_('cannot send commands unless peer instance '
3323 raise error.Abort(_('cannot send commands unless peer instance '
3319 'is available'))
3324 'is available'))
3320
3325
3321 command = action.split(' ', 1)[1]
3326 command = action.split(' ', 1)[1]
3322
3327
3323 args = {}
3328 args = {}
3324 for line in lines:
3329 for line in lines:
3325 # We need to allow empty values.
3330 # We need to allow empty values.
3326 fields = line.lstrip().split(' ', 1)
3331 fields = line.lstrip().split(' ', 1)
3327 if len(fields) == 1:
3332 if len(fields) == 1:
3328 key = fields[0]
3333 key = fields[0]
3329 value = ''
3334 value = ''
3330 else:
3335 else:
3331 key, value = fields
3336 key, value = fields
3332
3337
3333 if value.startswith('eval:'):
3338 if value.startswith('eval:'):
3334 value = stringutil.evalpythonliteral(value[5:])
3339 value = stringutil.evalpythonliteral(value[5:])
3335 else:
3340 else:
3336 value = stringutil.unescapestr(value)
3341 value = stringutil.unescapestr(value)
3337
3342
3338 args[key] = value
3343 args[key] = value
3339
3344
3340 if batchedcommands is not None:
3345 if batchedcommands is not None:
3341 batchedcommands.append((command, args))
3346 batchedcommands.append((command, args))
3342 continue
3347 continue
3343
3348
3344 ui.status(_('sending %s command\n') % command)
3349 ui.status(_('sending %s command\n') % command)
3345
3350
3346 if 'PUSHFILE' in args:
3351 if 'PUSHFILE' in args:
3347 with open(args['PUSHFILE'], r'rb') as fh:
3352 with open(args['PUSHFILE'], r'rb') as fh:
3348 del args['PUSHFILE']
3353 del args['PUSHFILE']
3349 res, output = peer._callpush(command, fh,
3354 res, output = peer._callpush(command, fh,
3350 **pycompat.strkwargs(args))
3355 **pycompat.strkwargs(args))
3351 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3356 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3352 ui.status(_('remote output: %s\n') %
3357 ui.status(_('remote output: %s\n') %
3353 stringutil.escapestr(output))
3358 stringutil.escapestr(output))
3354 else:
3359 else:
3355 with peer.commandexecutor() as e:
3360 with peer.commandexecutor() as e:
3356 res = e.callcommand(command, args).result()
3361 res = e.callcommand(command, args).result()
3357
3362
3358 if isinstance(res, wireprotov2peer.commandresponse):
3363 if isinstance(res, wireprotov2peer.commandresponse):
3359 val = res.objects()
3364 val = res.objects()
3360 ui.status(_('response: %s\n') %
3365 ui.status(_('response: %s\n') %
3361 stringutil.pprint(val, bprefix=True, indent=2))
3366 stringutil.pprint(val, bprefix=True, indent=2))
3362 else:
3367 else:
3363 ui.status(_('response: %s\n') %
3368 ui.status(_('response: %s\n') %
3364 stringutil.pprint(res, bprefix=True, indent=2))
3369 stringutil.pprint(res, bprefix=True, indent=2))

        elif action == 'batchbegin':
            if batchedcommands is not None:
                raise error.Abort(_('nested batchbegin not allowed'))

            batchedcommands = []
        elif action == 'batchsubmit':
            # There is a batching API we could go through. But it would be
            # difficult to normalize requests into function calls. It is easier
            # to bypass this layer and normalize to commands + args.
            ui.status(_('sending batch with %d sub-commands\n') %
                      len(batchedcommands))
            for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
                ui.status(_('response #%d: %s\n') %
                          (i, stringutil.escapestr(chunk)))

            batchedcommands = None
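            # Illustrative sketch of the batching syntax handled by the two
            # branches above (commands and arguments are placeholders):
            #
            #     batchbegin
            #     command heads
            #     command listkeys
            #         namespace bookmarks
            #     batchsubmit
            #
            # Between batchbegin and batchsubmit, `command` blocks are only
            # queued (see the batchedcommands check earlier); batchsubmit
            # then sends the queue as a single request via _submitbatch().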

        elif action.startswith('httprequest '):
            if not opener:
                raise error.Abort(_('cannot use httprequest without an HTTP '
                                    'peer'))

            request = action.split(' ', 2)
            if len(request) != 3:
                raise error.Abort(_('invalid httprequest: expected format is '
                                    '"httprequest <method> <path>"'))

            method, httppath = request[1:]
            headers = {}
            body = None
            frames = []
            for line in lines:
                line = line.lstrip()
                m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
                if m:
                    # Headers need to use native strings.
                    key = pycompat.strurl(m.group(1))
                    value = pycompat.strurl(m.group(2))
                    headers[key] = value
                    continue

                if line.startswith(b'BODYFILE '):
                    # Everything after the first space is the file name.
                    with open(line.split(b' ', 1)[1], 'rb') as fh:
                        body = fh.read()
                elif line.startswith(b'frame '):
                    frame = wireprotoframing.makeframefromhumanstring(
                        line[len(b'frame '):])

                    frames.append(frame)
                else:
                    raise error.Abort(_('unknown argument to httprequest: %s') %
                                      line)
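            # Illustrative sketch of an httprequest block as parsed by the
            # loop above; the path, header values, and file name are
            # placeholders:
            #
            #     httprequest POST some/wire/path
            #         content-type: application/mercurial-cbor
            #         user-agent: test
            #         BODYFILE ../request-body.bin
            #
            # `name: value` lines become HTTP headers, BODYFILE reads the
            # request body from a file, and `frame ...` lines (not shown)
            # are assembled into wire protocol version 2 frames that replace
            # the body.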

            url = path + httppath

            if frames:
                body = b''.join(bytes(f) for f in frames)

            req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)

            # urllib.Request insists on using has_data() as a proxy for
            # determining the request method. Override that to use our
            # explicitly requested method.
            req.get_method = lambda: pycompat.sysstr(method)

            try:
                res = opener.open(req)
                body = res.read()
            except util.urlerr.urlerror as e:
                # read() method must be called, but only exists in Python 2
                getattr(e, 'read', lambda: None)()
                continue

            ct = res.headers.get(r'Content-Type')
            if ct == r'application/mercurial-cbor':
                ui.write(_('cbor> %s\n') %
                         stringutil.pprint(cborutil.decodeall(body),
                                           bprefix=True,
                                           indent=2))

        elif action == 'close':
            peer.close()
        elif action == 'readavailable':
            if not stdout or not stderr:
                raise error.Abort(_('readavailable not available on this peer'))

            stdin.close()
            stdout.read()
            stderr.read()

        elif action == 'readline':
            if not stdout:
                raise error.Abort(_('readline not available on this peer'))
            stdout.readline()
        elif action == 'ereadline':
            if not stderr:
                raise error.Abort(_('ereadline not available on this peer'))
            stderr.readline()
        elif action.startswith('read '):
            count = int(action.split(' ', 1)[1])
            if not stdout:
                raise error.Abort(_('read not available on this peer'))
            stdout.read(count)
        elif action.startswith('eread '):
            count = int(action.split(' ', 1)[1])
            if not stderr:
                raise error.Abort(_('eread not available on this peer'))
            stderr.read(count)
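            # The stream-level actions above (readavailable, readline,
            # ereadline, read N, eread N) exercise an SSH peer's pipes
            # directly. A sketch of how they might appear in a script
            # (the byte count is arbitrary):
            #
            #     readline
            #     ereadline
            #     read 20
            #     readavailable
            #
            # read/readline consume the peer's stdout, the e-prefixed
            # variants consume stderr, and readavailable closes stdin and
            # then drains both output streams.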
        else:
            raise error.Abort(_('unknown action: %s') % action)

    if batchedcommands is not None:
        raise error.Abort(_('unclosed "batchbegin" request'))

    if peer:
        peer.close()

    if proc:
        proc.kill()
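# A sketch of driving the code above from the command line; the flag and
# script contents are illustrative of typical test usage rather than a
# prescribed invocation:
#
#     $ hg debugwireproto --localssh << EOF
#     > command heads
#     > EOF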