debugcommands: use our CBOR decoder...
Gregory Szorc - r39480:e5eb67de default
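The hunk below only shows the import swap: the vendored thirdparty cbor package is dropped and cborutil from mercurial/utils is imported instead. As a rough, non-authoritative sketch of the decode pattern this implies elsewhere in the file (the helper name decodepayload is invented for illustration, and it assumes cborutil.decodeall() returns every top-level CBOR value in the buffer as a list):

from mercurial.utils import cborutil

def decodepayload(data):
    # Hypothetical helper, not part of this changeset: decode a CBOR byte
    # string with Mercurial's own decoder instead of the vendored cbor module.
    # cborutil.decodeall() is assumed to decode every top-level CBOR value in
    # `data` and return them as a list (raising if the data is truncated).
    values = cborutil.decodeall(data)
    # Callers that previously used cbor.loads(data) on a single-value payload
    # would take values[0] here.
    return values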
@@ -1,3365 +1,3364 @@
# debugcommands.py - command processing for debug* commands
#
# Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import codecs
import collections
import difflib
import errno
import operator
import os
import random
import re
import socket
import ssl
import stat
import string
import subprocess
import sys
import time

from .i18n import _
from .node import (
    bin,
    hex,
    nullhex,
    nullid,
    nullrev,
    short,
)
-from .thirdparty import (
-    cbor,
-)
from . import (
    bundle2,
    changegroup,
    cmdutil,
    color,
    context,
    dagparser,
    encoding,
    error,
    exchange,
    extensions,
    filemerge,
    filesetlang,
    formatter,
    hg,
    httppeer,
    localrepo,
    lock as lockmod,
    logcmdutil,
    merge as mergemod,
    obsolete,
    obsutil,
    phases,
    policy,
    pvec,
    pycompat,
    registrar,
    repair,
    revlog,
    revset,
    revsetlang,
    scmutil,
    setdiscovery,
    simplemerge,
    sshpeer,
    sslutil,
    streamclone,
    templater,
    treediscovery,
    upgrade,
    url as urlmod,
    util,
    vfs as vfsmod,
    wireprotoframing,
    wireprotoserver,
    wireprotov2peer,
)
from .utils import (
+    cborutil,
    dateutil,
    procutil,
    stringutil,
)

from .revlogutils import (
    deltas as deltautil
)

release = lockmod.release

command = registrar.command()

@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    if len(args) == 3:
        index, rev1, rev2 = args
        r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
        lookup = r.lookup
    elif len(args) == 2:
        if not repo:
            raise error.Abort(_('there is no Mercurial repository here '
                                '(.hg not found)'))
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_('either two or three arguments required'))
    a = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write('%d:%s\n' % (r.rev(a), hex(a)))

@command('debugapplystreamclonebundle', [], 'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    f = hg.openpath(ui, fname)
    gen = exchange.readbundle(ui, f, fname)
    gen.apply(repo)

@command('debugbuilddag',
    [('m', 'mergeable-file', None, _('add single file mergeable changes')),
    ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
    ('n', 'new-file', None, _('add new file at each rev'))],
    _('[OPTION]... [TEXT]'))
def debugbuilddag(ui, repo, text=None,
                  mergeable_file=False,
                  overwritten_file=False,
                  new_file=False):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

     - "+n" is a linear run of n nodes based on the current default parent
     - "." is a single node based on the current default parent
     - "$" resets the default parent to null (implied at the start);
       otherwise the default parent is always the last node created
     - "<p" sets the default parent to the backref p
     - "*p" is a fork at parent p, which is a backref
     - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
     - "/p2" is a merge of the preceding node and p2
     - ":tag" defines a local tag for the preceding node
     - "@branch" sets the named branch for subsequent nodes
     - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

     - a number n, which references the node curr-n, where curr is the current
       node, or
     - the name of a local tag you placed earlier using ":tag", or
     - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_("reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_('repository is not empty'))

    # determine number of revs in DAG
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == 'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = ['%d' % i
                              for i in pycompat.xrange(0, total * linesperrev)]
        initialmergedlines.append("")

    tags = []
    progress = ui.makeprogress(_('building'), unit=_('revisions'),
                               total=total)
    with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
        at = -1
        atbranch = 'default'
        nodeids = []
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == 'n':
                ui.note(('node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = "mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [x[fn].data() for x in (pa, p1,
                                                                     p2)]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append("")
                    elif at > 0:
                        ml = p1[fn].data().split("\n")
                    else:
                        ml = initialmergedlines
                    ml[id * linesperrev] += " r%i" % id
                    mergedtext = "\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = "of"
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id

                if new_file:
                    fn = "nf%i" % id
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith("nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(repo, cx, path,
                                                  filecontent[path])
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
                                    date=(id, 0),
                                    user="debugbuilddag",
                                    extra={'branch': atbranch})
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == 'l':
                id, name = data
                ui.note(('tag %s\n' % name))
                tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == 'a':
                ui.note(('branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write("localtags", "".join(tags))

def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    indent_string = ' ' * indent
    if all:
        ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
                 % indent_string)

        def showchunks(named):
            ui.write("\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write("%s%s %s %s %s %s %d\n" %
                         (indent_string, hex(node), hex(p1), hex(p2),
                          hex(cs), hex(deltabase), len(delta)))

        chunkdata = gen.changelogheader()
        showchunks("changelog")
        chunkdata = gen.manifestheader()
        showchunks("manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata['filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_('use debugbundle2 for this file'))
        chunkdata = gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write("%s%s\n" % (indent_string, hex(node)))

def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = ' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        msg = "%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = "%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        fm = ui.formatter('debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()

def _debugphaseheads(ui, data, indent=0):
    """display version and markers contained in 'data'"""
    indent_string = ' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(indent_string)
            ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))

def _quasirepr(thing):
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        return '{%s}' % (
            b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
    return pycompat.bytestr(repr(thing))

def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_('not a bundle2 file'))
    ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
    parttypes = opts.get(r'part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = '%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        if part.type == 'changegroup':
            version = part.params.get('version', '01')
            cg = changegroup.getunbundler(version, part, 'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == 'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == 'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)

@command('debugbundle',
    [('a', 'all', None, _('show all details')),
    ('', 'part-type', [], _('show only the named part type')),
    ('', 'spec', None, _('print the bundlespec of the bundle'))],
    _('FILE'),
    norepo=True)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            spec = exchange.getbundlespec(ui, f)
            ui.write('%s\n' % spec)
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)

@command('debugcapabilities',
    [], _('PATH'),
    norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    caps = peer.capabilities()
    ui.write(('Main capabilities:\n'))
    for c in sorted(caps):
        ui.write((' %s\n') % c)
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.write(('Bundle2 capabilities:\n'))
        for key, values in sorted(b2caps.iteritems()):
            ui.write((' %s\n') % key)
            for v in values:
                ui.write((' %s\n') % v)

@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        error = _(".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(error)

@command('debugcolor',
    [('', 'style', None, _('show all configured styles'))],
    'hg debugcolor')
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
    if opts.get(r'style'):
        return _debugdisplaystyle(ui)
    else:
        return _debugdisplaycolor(ui)

def _debugdisplaycolor(ui):
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for k, v in ui.configitems('color'):
            if k.startswith('color.'):
                ui._styles[k] = k[6:]
            elif k.startswith('terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_('available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(),
                   key=lambda i: ('_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(('%s\n') % colorname, label=label)

def _debugdisplaystyle(ui):
    ui.write(_('available style:\n'))
    if not ui._styles:
        return
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write('%s' % label, label=label)
        if effects:
            # 50
            ui.write(': ')
            ui.write(' ' * (max(0, width - len(label))))
            ui.write(', '.join(ui.label(e, e) for e in effects.split()))
        ui.write('\n')

@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(_('(warning: stream clone bundle will contain secret '
                  'revisions)\n'))

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))

@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
    ('b', 'branches', None, _('annotate with branch names')),
    ('', 'dots', None, _('use dots for runs')),
    ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get(r'spaces')
    dots = opts.get(r'dots')
    if file_:
        rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                             file_)
        revs = set((int(r) for r in revs))
        def events():
            for r in rlog:
                yield 'n', (r, list(p for p in rlog.parentrevs(r)
                                    if p != -1))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        cl = repo.changelog
        tags = opts.get(r'tags')
        branches = opts.get(r'branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            b = "default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(p for p in cl.parentrevs(r)
                                    if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
        ui.write("\n")

@command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    r = cmdutil.openstorage(repo, 'debugdata', file_, opts)
    try:
        ui.write(r.revision(r.lookup(rev), raw=True))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)

@command('debugdate',
    [('e', 'extended', None, _('try extended date formats'))],
    _('[-e] DATE [RANGE]'),
    norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts[r"extended"]:
        d = dateutil.parsedate(date, util.extendeddateformats)
    else:
        d = dateutil.parsedate(date)
    ui.write(("internal: %d %d\n") % d)
    ui.write(("standard: %s\n") % dateutil.datestr(d))
    if range:
        m = dateutil.matchdate(range)
        ui.write(("match: %s\n") % m(d[0]))

@command('debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
        (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
        of this revision
    :``extradist``: total size of revisions not part of this delta chain from
        base of delta chain to end of this revision; a measurement
        of how much extra data we need to read/seek across to read
        the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
        how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
        (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            if e[3] == e[5]:
                deltatype = 'p1'
            elif e[3] == e[6]:
                deltatype = 'p2'
            elif e[3] == rev - 1:
                deltatype = 'prev'
            elif e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            if e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    fm.plain(' rev chain# chainlen prev delta '
             'size rawsize chainsize ratio lindist extradist '
             'extraratio')
    if withsparseread:
        fm.plain(' readsize largestblk rddensity srchunks')
    fm.plain('\n')

    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write('readsize largestblock readdensity srchunks',
                     ' %10d %10d %9.5f %8d',
                     readsize, largestblock, readdensity, srchunks,
                     readsize=readsize, largestblock=largestblock,
                     readdensity=readdensity, srchunks=srchunks)

        fm.plain('\n')

    fm.end()

@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime')),
    ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = opts.get(r'nodates')
    datesort = opts.get(r'datesort')

    timestr = ""
    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        if ent[3] == -1:
            timestr = 'unset '
        elif nodates:
            timestr = 'set '
        else:
            timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
                                    time.localtime(ent[3]))
            timestr = encoding.strtolocal(timestr)
        if ent[1] & 0o20000:
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))

@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
    ('', 'nonheads', None,
     _('use old-style discovery with non-heads included')),
    ('', 'rev', [], 'restrict discovery to this set of revs'),
    ] + cmdutil.remoteopts,
    _('[--rev REV] [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(pushedrevs, remoteheads, remote=remote):
        if opts.get('old'):
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))

                clnode = repo.changelog.node
                common = repo.revs('heads(::%ln)', common)
                common = {clnode(r) for r in common}
        else:
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
                                                            ancestorsof=nodes)
        common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
        if lheads <= common:
            ui.write(("local is subset\n"))
        elif rheads <= common:
            ui.write(("remote is subset\n"))

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts['rev']
    doit(localrevs, remoterevs)

_chunksize = 4 << 10

@command('debugdownload',
    [
        ('o', 'output', '', _('path')),
    ],
    optionalrepo=True)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config
    """
    fh = urlmod.open(ui, url, output)

    dest = ui
    if output:
        dest = open(output, "wb", _chunksize)
    try:
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        if output:
            dest.close()

@command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter('debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = pycompat.fsencode(extmod.__file__)
        if isinternal:
            exttestedwith = [] # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', '').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write('name', '%s\n', extname)
        else:
            fm.write('name', '%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain('\n')
            elif not exttestedwith:
                fm.plain(_(' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(' (%s!)\n' % lasttestedversion)

        fm.condwrite(ui.verbose and extsource, 'source',
                     _(' location: %s\n'), extsource or "")

        if ui.verbose:
            fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
                     _(' tested with: %s\n'),
                     fm.formatlist(exttestedwith, name='ver'))

        fm.condwrite(ui.verbose and extbuglink, 'buglink',
                     _(' bug reporting: %s\n'), extbuglink or "")

    fm.end()

892 @command('debugfileset',
890 @command('debugfileset',
893 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
891 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
894 ('', 'all-files', False,
892 ('', 'all-files', False,
895 _('test files from all revisions and working directory')),
893 _('test files from all revisions and working directory')),
896 ('s', 'show-matcher', None,
894 ('s', 'show-matcher', None,
897 _('print internal representation of matcher')),
895 _('print internal representation of matcher')),
898 ('p', 'show-stage', [],
896 ('p', 'show-stage', [],
899 _('print parsed tree at the given stage'), _('NAME'))],
897 _('print parsed tree at the given stage'), _('NAME'))],
900 _('[-r REV] [--all-files] [OPTION]... FILESPEC'))
898 _('[-r REV] [--all-files] [OPTION]... FILESPEC'))
901 def debugfileset(ui, repo, expr, **opts):
899 def debugfileset(ui, repo, expr, **opts):
902 '''parse and apply a fileset specification'''
900 '''parse and apply a fileset specification'''
903 from . import fileset
901 from . import fileset
904 fileset.symbols # force import of fileset so we have predicates to optimize
902 fileset.symbols # force import of fileset so we have predicates to optimize
905 opts = pycompat.byteskwargs(opts)
903 opts = pycompat.byteskwargs(opts)
906 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
904 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
907
905
908 stages = [
906 stages = [
909 ('parsed', pycompat.identity),
907 ('parsed', pycompat.identity),
910 ('analyzed', filesetlang.analyze),
908 ('analyzed', filesetlang.analyze),
911 ('optimized', filesetlang.optimize),
909 ('optimized', filesetlang.optimize),
912 ]
910 ]
913 stagenames = set(n for n, f in stages)
911 stagenames = set(n for n, f in stages)
914
912
915 showalways = set()
913 showalways = set()
916 if ui.verbose and not opts['show_stage']:
914 if ui.verbose and not opts['show_stage']:
917 # show parsed tree by --verbose (deprecated)
915 # show parsed tree by --verbose (deprecated)
918 showalways.add('parsed')
916 showalways.add('parsed')
919 if opts['show_stage'] == ['all']:
917 if opts['show_stage'] == ['all']:
920 showalways.update(stagenames)
918 showalways.update(stagenames)
921 else:
919 else:
922 for n in opts['show_stage']:
920 for n in opts['show_stage']:
923 if n not in stagenames:
921 if n not in stagenames:
924 raise error.Abort(_('invalid stage name: %s') % n)
922 raise error.Abort(_('invalid stage name: %s') % n)
925 showalways.update(opts['show_stage'])
923 showalways.update(opts['show_stage'])
926
924
927 tree = filesetlang.parse(expr)
925 tree = filesetlang.parse(expr)
928 for n, f in stages:
926 for n, f in stages:
929 tree = f(tree)
927 tree = f(tree)
930 if n in showalways:
928 if n in showalways:
931 if opts['show_stage'] or n != 'parsed':
929 if opts['show_stage'] or n != 'parsed':
932 ui.write(("* %s:\n") % n)
930 ui.write(("* %s:\n") % n)
933 ui.write(filesetlang.prettyformat(tree), "\n")
931 ui.write(filesetlang.prettyformat(tree), "\n")
934
932
935 files = set()
933 files = set()
936 if opts['all_files']:
934 if opts['all_files']:
937 for r in repo:
935 for r in repo:
938 c = repo[r]
936 c = repo[r]
939 files.update(c.files())
937 files.update(c.files())
940 files.update(c.substate)
938 files.update(c.substate)
941 if opts['all_files'] or ctx.rev() is None:
939 if opts['all_files'] or ctx.rev() is None:
942 wctx = repo[None]
940 wctx = repo[None]
943 files.update(repo.dirstate.walk(scmutil.matchall(repo),
941 files.update(repo.dirstate.walk(scmutil.matchall(repo),
944 subrepos=list(wctx.substate),
942 subrepos=list(wctx.substate),
945 unknown=True, ignored=True))
943 unknown=True, ignored=True))
946 files.update(wctx.substate)
944 files.update(wctx.substate)
947 else:
945 else:
948 files.update(ctx.files())
946 files.update(ctx.files())
949 files.update(ctx.substate)
947 files.update(ctx.substate)
950
948
951 m = ctx.matchfileset(expr)
949 m = ctx.matchfileset(expr)
952 if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
950 if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
953 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
951 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
954 for f in sorted(files):
952 for f in sorted(files):
955 if not m(f):
953 if not m(f):
956 continue
954 continue
957 ui.write("%s\n" % f)
955 ui.write("%s\n" % f)
958
956
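# Illustrative usage of debugfileset (a sketch; 'added()' stands in for any
# filespec and assumes the standard fileset predicate of that name):
#
#   $ hg debugfileset --show-stage all 'added()'
#   $ hg debugfileset -r tip --show-matcher 'added()'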
959 @command('debugformat',
957 @command('debugformat',
960 [] + cmdutil.formatteropts)
958 [] + cmdutil.formatteropts)
961 def debugformat(ui, repo, **opts):
959 def debugformat(ui, repo, **opts):
962 """display format information about the current repository
960 """display format information about the current repository
963
961
964 Use --verbose to get extra information about the current config value
962 Use --verbose to get extra information about the current config value
965 and the Mercurial default."""
963 and the Mercurial default."""
966 opts = pycompat.byteskwargs(opts)
964 opts = pycompat.byteskwargs(opts)
967 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
965 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
968 maxvariantlength = max(len('format-variant'), maxvariantlength)
966 maxvariantlength = max(len('format-variant'), maxvariantlength)
969
967
970 def makeformatname(name):
968 def makeformatname(name):
971 return '%s:' + (' ' * (maxvariantlength - len(name)))
969 return '%s:' + (' ' * (maxvariantlength - len(name)))
972
970
973 fm = ui.formatter('debugformat', opts)
971 fm = ui.formatter('debugformat', opts)
974 if fm.isplain():
972 if fm.isplain():
975 def formatvalue(value):
973 def formatvalue(value):
976 if util.safehasattr(value, 'startswith'):
974 if util.safehasattr(value, 'startswith'):
977 return value
975 return value
978 if value:
976 if value:
979 return 'yes'
977 return 'yes'
980 else:
978 else:
981 return 'no'
979 return 'no'
982 else:
980 else:
983 formatvalue = pycompat.identity
981 formatvalue = pycompat.identity
984
982
985 fm.plain('format-variant')
983 fm.plain('format-variant')
986 fm.plain(' ' * (maxvariantlength - len('format-variant')))
984 fm.plain(' ' * (maxvariantlength - len('format-variant')))
987 fm.plain(' repo')
985 fm.plain(' repo')
988 if ui.verbose:
986 if ui.verbose:
989 fm.plain(' config default')
987 fm.plain(' config default')
990 fm.plain('\n')
988 fm.plain('\n')
991 for fv in upgrade.allformatvariant:
989 for fv in upgrade.allformatvariant:
992 fm.startitem()
990 fm.startitem()
993 repovalue = fv.fromrepo(repo)
991 repovalue = fv.fromrepo(repo)
994 configvalue = fv.fromconfig(repo)
992 configvalue = fv.fromconfig(repo)
995
993
996 if repovalue != configvalue:
994 if repovalue != configvalue:
997 namelabel = 'formatvariant.name.mismatchconfig'
995 namelabel = 'formatvariant.name.mismatchconfig'
998 repolabel = 'formatvariant.repo.mismatchconfig'
996 repolabel = 'formatvariant.repo.mismatchconfig'
999 elif repovalue != fv.default:
997 elif repovalue != fv.default:
1000 namelabel = 'formatvariant.name.mismatchdefault'
998 namelabel = 'formatvariant.name.mismatchdefault'
1001 repolabel = 'formatvariant.repo.mismatchdefault'
999 repolabel = 'formatvariant.repo.mismatchdefault'
1002 else:
1000 else:
1003 namelabel = 'formatvariant.name.uptodate'
1001 namelabel = 'formatvariant.name.uptodate'
1004 repolabel = 'formatvariant.repo.uptodate'
1002 repolabel = 'formatvariant.repo.uptodate'
1005
1003
1006 fm.write('name', makeformatname(fv.name), fv.name,
1004 fm.write('name', makeformatname(fv.name), fv.name,
1007 label=namelabel)
1005 label=namelabel)
1008 fm.write('repo', ' %3s', formatvalue(repovalue),
1006 fm.write('repo', ' %3s', formatvalue(repovalue),
1009 label=repolabel)
1007 label=repolabel)
1010 if fv.default != configvalue:
1008 if fv.default != configvalue:
1011 configlabel = 'formatvariant.config.special'
1009 configlabel = 'formatvariant.config.special'
1012 else:
1010 else:
1013 configlabel = 'formatvariant.config.default'
1011 configlabel = 'formatvariant.config.default'
1014 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
1012 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
1015 label=configlabel)
1013 label=configlabel)
1016 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
1014 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
1017 label='formatvariant.default')
1015 label='formatvariant.default')
1018 fm.plain('\n')
1016 fm.plain('\n')
1019 fm.end()
1017 fm.end()
1020
1018
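# Illustrative usage of debugformat (a sketch):
#
#   $ hg debugformat            # one row per format variant, repo value only
#   $ hg debugformat --verbose  # also show the config and default columns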
1021 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
1019 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
1022 def debugfsinfo(ui, path="."):
1020 def debugfsinfo(ui, path="."):
1023 """show information detected about current filesystem"""
1021 """show information detected about current filesystem"""
1024 ui.write(('path: %s\n') % path)
1022 ui.write(('path: %s\n') % path)
1025 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
1023 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
1026 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
1024 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
1027 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
1025 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
1028 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
1026 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
1029 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
1027 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
1030 casesensitive = '(unknown)'
1028 casesensitive = '(unknown)'
1031 try:
1029 try:
1032 with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
1030 with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
1033 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
1031 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
1034 except OSError:
1032 except OSError:
1035 pass
1033 pass
1036 ui.write(('case-sensitive: %s\n') % casesensitive)
1034 ui.write(('case-sensitive: %s\n') % casesensitive)
1037
1035
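# Illustrative usage of debugfsinfo (a sketch; the path argument is optional
# and defaults to the current directory, the example path is hypothetical):
#
#   $ hg debugfsinfo
#   $ hg debugfsinfo /path/to/some/directory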
1038 @command('debuggetbundle',
1036 @command('debuggetbundle',
1039 [('H', 'head', [], _('id of head node'), _('ID')),
1037 [('H', 'head', [], _('id of head node'), _('ID')),
1040 ('C', 'common', [], _('id of common node'), _('ID')),
1038 ('C', 'common', [], _('id of common node'), _('ID')),
1041 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
1039 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
1042 _('REPO FILE [-H|-C ID]...'),
1040 _('REPO FILE [-H|-C ID]...'),
1043 norepo=True)
1041 norepo=True)
1044 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1042 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1045 """retrieves a bundle from a repo
1043 """retrieves a bundle from a repo
1046
1044
1047 Every ID must be a full-length hex node id string. Saves the bundle to the
1045 Every ID must be a full-length hex node id string. Saves the bundle to the
1048 given file.
1046 given file.
1049 """
1047 """
1050 opts = pycompat.byteskwargs(opts)
1048 opts = pycompat.byteskwargs(opts)
1051 repo = hg.peer(ui, opts, repopath)
1049 repo = hg.peer(ui, opts, repopath)
1052 if not repo.capable('getbundle'):
1050 if not repo.capable('getbundle'):
1053 raise error.Abort("getbundle() not supported by target repository")
1051 raise error.Abort("getbundle() not supported by target repository")
1054 args = {}
1052 args = {}
1055 if common:
1053 if common:
1056 args[r'common'] = [bin(s) for s in common]
1054 args[r'common'] = [bin(s) for s in common]
1057 if head:
1055 if head:
1058 args[r'heads'] = [bin(s) for s in head]
1056 args[r'heads'] = [bin(s) for s in head]
1059 # TODO: get desired bundlecaps from command line.
1057 # TODO: get desired bundlecaps from command line.
1060 args[r'bundlecaps'] = None
1058 args[r'bundlecaps'] = None
1061 bundle = repo.getbundle('debug', **args)
1059 bundle = repo.getbundle('debug', **args)
1062
1060
1063 bundletype = opts.get('type', 'bzip2').lower()
1061 bundletype = opts.get('type', 'bzip2').lower()
1064 btypes = {'none': 'HG10UN',
1062 btypes = {'none': 'HG10UN',
1065 'bzip2': 'HG10BZ',
1063 'bzip2': 'HG10BZ',
1066 'gzip': 'HG10GZ',
1064 'gzip': 'HG10GZ',
1067 'bundle2': 'HG20'}
1065 'bundle2': 'HG20'}
1068 bundletype = btypes.get(bundletype)
1066 bundletype = btypes.get(bundletype)
1069 if bundletype not in bundle2.bundletypes:
1067 if bundletype not in bundle2.bundletypes:
1070 raise error.Abort(_('unknown bundle type specified with --type'))
1068 raise error.Abort(_('unknown bundle type specified with --type'))
1071 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1069 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1072
1070
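# Illustrative usage of debuggetbundle (a sketch; ../src is a hypothetical
# source repository and every -H/-C value must be a full 40-digit hex node):
#
#   $ hg debuggetbundle ../src bundle.hg -t bundle2
#   $ hg debuggetbundle ../src bundle.hg -H <full-hex-head-id>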
1073 @command('debugignore', [], '[FILE]')
1071 @command('debugignore', [], '[FILE]')
1074 def debugignore(ui, repo, *files, **opts):
1072 def debugignore(ui, repo, *files, **opts):
1075 """display the combined ignore pattern and information about ignored files
1073 """display the combined ignore pattern and information about ignored files
1076
1074
1077 With no argument display the combined ignore pattern.
1075 With no argument display the combined ignore pattern.
1078
1076
1079 Given space-separated file names, shows whether each file is ignored and,
1077 Given space-separated file names, shows whether each file is ignored and,
1080 if so, shows the ignore rule (file and line number) that matched it.
1078 if so, shows the ignore rule (file and line number) that matched it.
1081 """
1079 """
1082 ignore = repo.dirstate._ignore
1080 ignore = repo.dirstate._ignore
1083 if not files:
1081 if not files:
1084 # Show all the patterns
1082 # Show all the patterns
1085 ui.write("%s\n" % pycompat.byterepr(ignore))
1083 ui.write("%s\n" % pycompat.byterepr(ignore))
1086 else:
1084 else:
1087 m = scmutil.match(repo[None], pats=files)
1085 m = scmutil.match(repo[None], pats=files)
1088 for f in m.files():
1086 for f in m.files():
1089 nf = util.normpath(f)
1087 nf = util.normpath(f)
1090 ignored = None
1088 ignored = None
1091 ignoredata = None
1089 ignoredata = None
1092 if nf != '.':
1090 if nf != '.':
1093 if ignore(nf):
1091 if ignore(nf):
1094 ignored = nf
1092 ignored = nf
1095 ignoredata = repo.dirstate._ignorefileandline(nf)
1093 ignoredata = repo.dirstate._ignorefileandline(nf)
1096 else:
1094 else:
1097 for p in util.finddirs(nf):
1095 for p in util.finddirs(nf):
1098 if ignore(p):
1096 if ignore(p):
1099 ignored = p
1097 ignored = p
1100 ignoredata = repo.dirstate._ignorefileandline(p)
1098 ignoredata = repo.dirstate._ignorefileandline(p)
1101 break
1099 break
1102 if ignored:
1100 if ignored:
1103 if ignored == nf:
1101 if ignored == nf:
1104 ui.write(_("%s is ignored\n") % m.uipath(f))
1102 ui.write(_("%s is ignored\n") % m.uipath(f))
1105 else:
1103 else:
1106 ui.write(_("%s is ignored because of "
1104 ui.write(_("%s is ignored because of "
1107 "containing folder %s\n")
1105 "containing folder %s\n")
1108 % (m.uipath(f), ignored))
1106 % (m.uipath(f), ignored))
1109 ignorefile, lineno, line = ignoredata
1107 ignorefile, lineno, line = ignoredata
1110 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1108 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1111 % (ignorefile, lineno, line))
1109 % (ignorefile, lineno, line))
1112 else:
1110 else:
1113 ui.write(_("%s is not ignored\n") % m.uipath(f))
1111 ui.write(_("%s is not ignored\n") % m.uipath(f))
1114
1112
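# Illustrative usage of debugignore (a sketch; the file names are hypothetical):
#
#   $ hg debugignore                 # dump the combined ignore matcher
#   $ hg debugignore build/ foo.pyc  # explain why the given paths are ignored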
1115 @command('debugindex', cmdutil.debugrevlogopts + cmdutil.formatteropts,
1113 @command('debugindex', cmdutil.debugrevlogopts + cmdutil.formatteropts,
1116 _('-c|-m|FILE'))
1114 _('-c|-m|FILE'))
1117 def debugindex(ui, repo, file_=None, **opts):
1115 def debugindex(ui, repo, file_=None, **opts):
1118 """dump index data for a storage primitive"""
1116 """dump index data for a storage primitive"""
1119 opts = pycompat.byteskwargs(opts)
1117 opts = pycompat.byteskwargs(opts)
1120 store = cmdutil.openstorage(repo, 'debugindex', file_, opts)
1118 store = cmdutil.openstorage(repo, 'debugindex', file_, opts)
1121
1119
1122 if ui.debugflag:
1120 if ui.debugflag:
1123 shortfn = hex
1121 shortfn = hex
1124 else:
1122 else:
1125 shortfn = short
1123 shortfn = short
1126
1124
1127 idlen = 12
1125 idlen = 12
1128 for i in store:
1126 for i in store:
1129 idlen = len(shortfn(store.node(i)))
1127 idlen = len(shortfn(store.node(i)))
1130 break
1128 break
1131
1129
1132 fm = ui.formatter('debugindex', opts)
1130 fm = ui.formatter('debugindex', opts)
1133 fm.plain(b' rev linkrev %s %s p2\n' % (
1131 fm.plain(b' rev linkrev %s %s p2\n' % (
1134 b'nodeid'.ljust(idlen),
1132 b'nodeid'.ljust(idlen),
1135 b'p1'.ljust(idlen)))
1133 b'p1'.ljust(idlen)))
1136
1134
1137 for rev in store:
1135 for rev in store:
1138 node = store.node(rev)
1136 node = store.node(rev)
1139 parents = store.parents(node)
1137 parents = store.parents(node)
1140
1138
1141 fm.startitem()
1139 fm.startitem()
1142 fm.write(b'rev', b'%6d ', rev)
1140 fm.write(b'rev', b'%6d ', rev)
1143 fm.write(b'linkrev', '%7d ', store.linkrev(rev))
1141 fm.write(b'linkrev', '%7d ', store.linkrev(rev))
1144 fm.write(b'node', '%s ', shortfn(node))
1142 fm.write(b'node', '%s ', shortfn(node))
1145 fm.write(b'p1', '%s ', shortfn(parents[0]))
1143 fm.write(b'p1', '%s ', shortfn(parents[0]))
1146 fm.write(b'p2', '%s', shortfn(parents[1]))
1144 fm.write(b'p2', '%s', shortfn(parents[1]))
1147 fm.plain(b'\n')
1145 fm.plain(b'\n')
1148
1146
1149 fm.end()
1147 fm.end()
1150
1148
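# Illustrative usage of debugindex (a sketch; the file path is hypothetical):
#
#   $ hg debugindex -c              # changelog index
#   $ hg debugindex -m              # manifest index
#   $ hg debugindex path/to/file    # filelog index for a tracked file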
1151 @command('debugindexdot', cmdutil.debugrevlogopts,
1149 @command('debugindexdot', cmdutil.debugrevlogopts,
1152 _('-c|-m|FILE'), optionalrepo=True)
1150 _('-c|-m|FILE'), optionalrepo=True)
1153 def debugindexdot(ui, repo, file_=None, **opts):
1151 def debugindexdot(ui, repo, file_=None, **opts):
1154 """dump an index DAG as a graphviz dot file"""
1152 """dump an index DAG as a graphviz dot file"""
1155 opts = pycompat.byteskwargs(opts)
1153 opts = pycompat.byteskwargs(opts)
1156 r = cmdutil.openstorage(repo, 'debugindexdot', file_, opts)
1154 r = cmdutil.openstorage(repo, 'debugindexdot', file_, opts)
1157 ui.write(("digraph G {\n"))
1155 ui.write(("digraph G {\n"))
1158 for i in r:
1156 for i in r:
1159 node = r.node(i)
1157 node = r.node(i)
1160 pp = r.parents(node)
1158 pp = r.parents(node)
1161 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1159 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1162 if pp[1] != nullid:
1160 if pp[1] != nullid:
1163 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1161 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1164 ui.write("}\n")
1162 ui.write("}\n")
1165
1163
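# Illustrative usage of debugindexdot (a sketch; rendering the .dot output
# assumes Graphviz is installed):
#
#   $ hg debugindexdot -c > revs.dot
#   $ dot -Tpng revs.dot -o revs.png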
1166 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1164 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1167 def debuginstall(ui, **opts):
1165 def debuginstall(ui, **opts):
1168 '''test Mercurial installation
1166 '''test Mercurial installation
1169
1167
1170 Returns 0 on success.
1168 Returns 0 on success.
1171 '''
1169 '''
1172 opts = pycompat.byteskwargs(opts)
1170 opts = pycompat.byteskwargs(opts)
1173
1171
1174 def writetemp(contents):
1172 def writetemp(contents):
1175 (fd, name) = pycompat.mkstemp(prefix="hg-debuginstall-")
1173 (fd, name) = pycompat.mkstemp(prefix="hg-debuginstall-")
1176 f = os.fdopen(fd, r"wb")
1174 f = os.fdopen(fd, r"wb")
1177 f.write(contents)
1175 f.write(contents)
1178 f.close()
1176 f.close()
1179 return name
1177 return name
1180
1178
1181 problems = 0
1179 problems = 0
1182
1180
1183 fm = ui.formatter('debuginstall', opts)
1181 fm = ui.formatter('debuginstall', opts)
1184 fm.startitem()
1182 fm.startitem()
1185
1183
1186 # encoding
1184 # encoding
1187 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1185 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1188 err = None
1186 err = None
1189 try:
1187 try:
1190 codecs.lookup(pycompat.sysstr(encoding.encoding))
1188 codecs.lookup(pycompat.sysstr(encoding.encoding))
1191 except LookupError as inst:
1189 except LookupError as inst:
1192 err = stringutil.forcebytestr(inst)
1190 err = stringutil.forcebytestr(inst)
1193 problems += 1
1191 problems += 1
1194 fm.condwrite(err, 'encodingerror', _(" %s\n"
1192 fm.condwrite(err, 'encodingerror', _(" %s\n"
1195 " (check that your locale is properly set)\n"), err)
1193 " (check that your locale is properly set)\n"), err)
1196
1194
1197 # Python
1195 # Python
1198 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1196 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1199 pycompat.sysexecutable)
1197 pycompat.sysexecutable)
1200 fm.write('pythonver', _("checking Python version (%s)\n"),
1198 fm.write('pythonver', _("checking Python version (%s)\n"),
1201 ("%d.%d.%d" % sys.version_info[:3]))
1199 ("%d.%d.%d" % sys.version_info[:3]))
1202 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1200 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1203 os.path.dirname(pycompat.fsencode(os.__file__)))
1201 os.path.dirname(pycompat.fsencode(os.__file__)))
1204
1202
1205 security = set(sslutil.supportedprotocols)
1203 security = set(sslutil.supportedprotocols)
1206 if sslutil.hassni:
1204 if sslutil.hassni:
1207 security.add('sni')
1205 security.add('sni')
1208
1206
1209 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1207 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1210 fm.formatlist(sorted(security), name='protocol',
1208 fm.formatlist(sorted(security), name='protocol',
1211 fmt='%s', sep=','))
1209 fmt='%s', sep=','))
1212
1210
1213 # These are warnings, not errors. So don't increment problem count. This
1211 # These are warnings, not errors. So don't increment problem count. This
1214 # may change in the future.
1212 # may change in the future.
1215 if 'tls1.2' not in security:
1213 if 'tls1.2' not in security:
1216 fm.plain(_(' TLS 1.2 not supported by Python install; '
1214 fm.plain(_(' TLS 1.2 not supported by Python install; '
1217 'network connections lack modern security\n'))
1215 'network connections lack modern security\n'))
1218 if 'sni' not in security:
1216 if 'sni' not in security:
1219 fm.plain(_(' SNI not supported by Python install; may have '
1217 fm.plain(_(' SNI not supported by Python install; may have '
1220 'connectivity issues with some servers\n'))
1218 'connectivity issues with some servers\n'))
1221
1219
1222 # TODO print CA cert info
1220 # TODO print CA cert info
1223
1221
1224 # hg version
1222 # hg version
1225 hgver = util.version()
1223 hgver = util.version()
1226 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1224 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1227 hgver.split('+')[0])
1225 hgver.split('+')[0])
1228 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1226 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1229 '+'.join(hgver.split('+')[1:]))
1227 '+'.join(hgver.split('+')[1:]))
1230
1228
1231 # compiled modules
1229 # compiled modules
1232 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1230 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1233 policy.policy)
1231 policy.policy)
1234 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1232 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1235 os.path.dirname(pycompat.fsencode(__file__)))
1233 os.path.dirname(pycompat.fsencode(__file__)))
1236
1234
1237 if policy.policy in ('c', 'allow'):
1235 if policy.policy in ('c', 'allow'):
1238 err = None
1236 err = None
1239 try:
1237 try:
1240 from .cext import (
1238 from .cext import (
1241 base85,
1239 base85,
1242 bdiff,
1240 bdiff,
1243 mpatch,
1241 mpatch,
1244 osutil,
1242 osutil,
1245 )
1243 )
1246 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1244 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1247 except Exception as inst:
1245 except Exception as inst:
1248 err = stringutil.forcebytestr(inst)
1246 err = stringutil.forcebytestr(inst)
1249 problems += 1
1247 problems += 1
1250 fm.condwrite(err, 'extensionserror', " %s\n", err)
1248 fm.condwrite(err, 'extensionserror', " %s\n", err)
1251
1249
1252 compengines = util.compengines._engines.values()
1250 compengines = util.compengines._engines.values()
1253 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1251 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1254 fm.formatlist(sorted(e.name() for e in compengines),
1252 fm.formatlist(sorted(e.name() for e in compengines),
1255 name='compengine', fmt='%s', sep=', '))
1253 name='compengine', fmt='%s', sep=', '))
1256 fm.write('compenginesavail', _('checking available compression engines '
1254 fm.write('compenginesavail', _('checking available compression engines '
1257 '(%s)\n'),
1255 '(%s)\n'),
1258 fm.formatlist(sorted(e.name() for e in compengines
1256 fm.formatlist(sorted(e.name() for e in compengines
1259 if e.available()),
1257 if e.available()),
1260 name='compengine', fmt='%s', sep=', '))
1258 name='compengine', fmt='%s', sep=', '))
1261 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1259 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1262 fm.write('compenginesserver', _('checking available compression engines '
1260 fm.write('compenginesserver', _('checking available compression engines '
1263 'for wire protocol (%s)\n'),
1261 'for wire protocol (%s)\n'),
1264 fm.formatlist([e.name() for e in wirecompengines
1262 fm.formatlist([e.name() for e in wirecompengines
1265 if e.wireprotosupport()],
1263 if e.wireprotosupport()],
1266 name='compengine', fmt='%s', sep=', '))
1264 name='compengine', fmt='%s', sep=', '))
1267 re2 = 'missing'
1265 re2 = 'missing'
1268 if util._re2:
1266 if util._re2:
1269 re2 = 'available'
1267 re2 = 'available'
1270 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1268 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1271 fm.data(re2=bool(util._re2))
1269 fm.data(re2=bool(util._re2))
1272
1270
1273 # templates
1271 # templates
1274 p = templater.templatepaths()
1272 p = templater.templatepaths()
1275 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1273 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1276 fm.condwrite(not p, '', _(" no template directories found\n"))
1274 fm.condwrite(not p, '', _(" no template directories found\n"))
1277 if p:
1275 if p:
1278 m = templater.templatepath("map-cmdline.default")
1276 m = templater.templatepath("map-cmdline.default")
1279 if m:
1277 if m:
1280 # template found, check if it is working
1278 # template found, check if it is working
1281 err = None
1279 err = None
1282 try:
1280 try:
1283 templater.templater.frommapfile(m)
1281 templater.templater.frommapfile(m)
1284 except Exception as inst:
1282 except Exception as inst:
1285 err = stringutil.forcebytestr(inst)
1283 err = stringutil.forcebytestr(inst)
1286 p = None
1284 p = None
1287 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1285 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1288 else:
1286 else:
1289 p = None
1287 p = None
1290 fm.condwrite(p, 'defaulttemplate',
1288 fm.condwrite(p, 'defaulttemplate',
1291 _("checking default template (%s)\n"), m)
1289 _("checking default template (%s)\n"), m)
1292 fm.condwrite(not m, 'defaulttemplatenotfound',
1290 fm.condwrite(not m, 'defaulttemplatenotfound',
1293 _(" template '%s' not found\n"), "default")
1291 _(" template '%s' not found\n"), "default")
1294 if not p:
1292 if not p:
1295 problems += 1
1293 problems += 1
1296 fm.condwrite(not p, '',
1294 fm.condwrite(not p, '',
1297 _(" (templates seem to have been installed incorrectly)\n"))
1295 _(" (templates seem to have been installed incorrectly)\n"))
1298
1296
1299 # editor
1297 # editor
1300 editor = ui.geteditor()
1298 editor = ui.geteditor()
1301 editor = util.expandpath(editor)
1299 editor = util.expandpath(editor)
1302 editorbin = procutil.shellsplit(editor)[0]
1300 editorbin = procutil.shellsplit(editor)[0]
1303 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1301 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1304 cmdpath = procutil.findexe(editorbin)
1302 cmdpath = procutil.findexe(editorbin)
1305 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1303 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1306 _(" No commit editor set and can't find %s in PATH\n"
1304 _(" No commit editor set and can't find %s in PATH\n"
1307 " (specify a commit editor in your configuration"
1305 " (specify a commit editor in your configuration"
1308 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1306 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1309 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1307 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1310 _(" Can't find editor '%s' in PATH\n"
1308 _(" Can't find editor '%s' in PATH\n"
1311 " (specify a commit editor in your configuration"
1309 " (specify a commit editor in your configuration"
1312 " file)\n"), not cmdpath and editorbin)
1310 " file)\n"), not cmdpath and editorbin)
1313 if not cmdpath and editor != 'vi':
1311 if not cmdpath and editor != 'vi':
1314 problems += 1
1312 problems += 1
1315
1313
1316 # check username
1314 # check username
1317 username = None
1315 username = None
1318 err = None
1316 err = None
1319 try:
1317 try:
1320 username = ui.username()
1318 username = ui.username()
1321 except error.Abort as e:
1319 except error.Abort as e:
1322 err = stringutil.forcebytestr(e)
1320 err = stringutil.forcebytestr(e)
1323 problems += 1
1321 problems += 1
1324
1322
1325 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1323 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1326 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1324 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1327 " (specify a username in your configuration file)\n"), err)
1325 " (specify a username in your configuration file)\n"), err)
1328
1326
1329 fm.condwrite(not problems, '',
1327 fm.condwrite(not problems, '',
1330 _("no problems detected\n"))
1328 _("no problems detected\n"))
1331 if not problems:
1329 if not problems:
1332 fm.data(problems=problems)
1330 fm.data(problems=problems)
1333 fm.condwrite(problems, 'problems',
1331 fm.condwrite(problems, 'problems',
1334 _("%d problems detected,"
1332 _("%d problems detected,"
1335 " please check your install!\n"), problems)
1333 " please check your install!\n"), problems)
1336 fm.end()
1334 fm.end()
1337
1335
1338 return problems
1336 return problems
1339
1337
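# Illustrative usage of debuginstall (a sketch; per the return value above,
# the exit status is the number of problems detected):
#
#   $ hg debuginstall
#   $ hg debuginstall -T json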
1340 @command('debugknown', [], _('REPO ID...'), norepo=True)
1338 @command('debugknown', [], _('REPO ID...'), norepo=True)
1341 def debugknown(ui, repopath, *ids, **opts):
1339 def debugknown(ui, repopath, *ids, **opts):
1342 """test whether node ids are known to a repo
1340 """test whether node ids are known to a repo
1343
1341
1344 Every ID must be a full-length hex node id string. Returns a list of 0s
1342 Every ID must be a full-length hex node id string. Returns a list of 0s
1345 and 1s indicating unknown/known.
1343 and 1s indicating unknown/known.
1346 """
1344 """
1347 opts = pycompat.byteskwargs(opts)
1345 opts = pycompat.byteskwargs(opts)
1348 repo = hg.peer(ui, opts, repopath)
1346 repo = hg.peer(ui, opts, repopath)
1349 if not repo.capable('known'):
1347 if not repo.capable('known'):
1350 raise error.Abort("known() not supported by target repository")
1348 raise error.Abort("known() not supported by target repository")
1351 flags = repo.known([bin(s) for s in ids])
1349 flags = repo.known([bin(s) for s in ids])
1352 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1350 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1353
1351
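# Illustrative usage of debugknown (a sketch; each ID must be a full 40-digit
# hex node, produced here with a shell substitution; the output is a string
# with one 0/1 digit per ID):
#
#   $ hg debugknown ../other-repo $(hg log -r . -T '{node}')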
1354 @command('debuglabelcomplete', [], _('LABEL...'))
1352 @command('debuglabelcomplete', [], _('LABEL...'))
1355 def debuglabelcomplete(ui, repo, *args):
1353 def debuglabelcomplete(ui, repo, *args):
1356 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1354 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1357 debugnamecomplete(ui, repo, *args)
1355 debugnamecomplete(ui, repo, *args)
1358
1356
1359 @command('debuglocks',
1357 @command('debuglocks',
1360 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1358 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1361 ('W', 'force-wlock', None,
1359 ('W', 'force-wlock', None,
1362 _('free the working state lock (DANGEROUS)')),
1360 _('free the working state lock (DANGEROUS)')),
1363 ('s', 'set-lock', None, _('set the store lock until stopped')),
1361 ('s', 'set-lock', None, _('set the store lock until stopped')),
1364 ('S', 'set-wlock', None,
1362 ('S', 'set-wlock', None,
1365 _('set the working state lock until stopped'))],
1363 _('set the working state lock until stopped'))],
1366 _('[OPTION]...'))
1364 _('[OPTION]...'))
1367 def debuglocks(ui, repo, **opts):
1365 def debuglocks(ui, repo, **opts):
1368 """show or modify state of locks
1366 """show or modify state of locks
1369
1367
1370 By default, this command will show which locks are held. This
1368 By default, this command will show which locks are held. This
1371 includes the user and process holding the lock, the amount of time
1369 includes the user and process holding the lock, the amount of time
1372 the lock has been held, and the machine name where the process is
1370 the lock has been held, and the machine name where the process is
1373 running if it's not local.
1371 running if it's not local.
1374
1372
1375 Locks protect the integrity of Mercurial's data, so they should be
1373 Locks protect the integrity of Mercurial's data, so they should be
1376 treated with care. System crashes or other interruptions may cause
1374 treated with care. System crashes or other interruptions may cause
1377 locks to not be properly released, though Mercurial will usually
1375 locks to not be properly released, though Mercurial will usually
1378 detect and remove such stale locks automatically.
1376 detect and remove such stale locks automatically.
1379
1377
1380 However, detecting stale locks may not always be possible (for
1378 However, detecting stale locks may not always be possible (for
1381 instance, on a shared filesystem). Removing locks may also be
1379 instance, on a shared filesystem). Removing locks may also be
1382 blocked by filesystem permissions.
1380 blocked by filesystem permissions.
1383
1381
1384 Setting a lock will prevent other commands from changing the data.
1382 Setting a lock will prevent other commands from changing the data.
1385 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1383 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1386 The set locks are removed when the command exits.
1384 The set locks are removed when the command exits.
1387
1385
1388 Returns 0 if no locks are held.
1386 Returns 0 if no locks are held.
1389
1387
1390 """
1388 """
1391
1389
1392 if opts.get(r'force_lock'):
1390 if opts.get(r'force_lock'):
1393 repo.svfs.unlink('lock')
1391 repo.svfs.unlink('lock')
1394 if opts.get(r'force_wlock'):
1392 if opts.get(r'force_wlock'):
1395 repo.vfs.unlink('wlock')
1393 repo.vfs.unlink('wlock')
1396 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1394 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1397 return 0
1395 return 0
1398
1396
1399 locks = []
1397 locks = []
1400 try:
1398 try:
1401 if opts.get(r'set_wlock'):
1399 if opts.get(r'set_wlock'):
1402 try:
1400 try:
1403 locks.append(repo.wlock(False))
1401 locks.append(repo.wlock(False))
1404 except error.LockHeld:
1402 except error.LockHeld:
1405 raise error.Abort(_('wlock is already held'))
1403 raise error.Abort(_('wlock is already held'))
1406 if opts.get(r'set_lock'):
1404 if opts.get(r'set_lock'):
1407 try:
1405 try:
1408 locks.append(repo.lock(False))
1406 locks.append(repo.lock(False))
1409 except error.LockHeld:
1407 except error.LockHeld:
1410 raise error.Abort(_('lock is already held'))
1408 raise error.Abort(_('lock is already held'))
1411 if len(locks):
1409 if len(locks):
1412 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1410 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1413 return 0
1411 return 0
1414 finally:
1412 finally:
1415 release(*locks)
1413 release(*locks)
1416
1414
1417 now = time.time()
1415 now = time.time()
1418 held = 0
1416 held = 0
1419
1417
1420 def report(vfs, name, method):
1418 def report(vfs, name, method):
1421 # this causes stale locks to get reaped for more accurate reporting
1419 # this causes stale locks to get reaped for more accurate reporting
1422 try:
1420 try:
1423 l = method(False)
1421 l = method(False)
1424 except error.LockHeld:
1422 except error.LockHeld:
1425 l = None
1423 l = None
1426
1424
1427 if l:
1425 if l:
1428 l.release()
1426 l.release()
1429 else:
1427 else:
1430 try:
1428 try:
1431 st = vfs.lstat(name)
1429 st = vfs.lstat(name)
1432 age = now - st[stat.ST_MTIME]
1430 age = now - st[stat.ST_MTIME]
1433 user = util.username(st.st_uid)
1431 user = util.username(st.st_uid)
1434 locker = vfs.readlock(name)
1432 locker = vfs.readlock(name)
1435 if ":" in locker:
1433 if ":" in locker:
1436 host, pid = locker.split(':')
1434 host, pid = locker.split(':')
1437 if host == socket.gethostname():
1435 if host == socket.gethostname():
1438 locker = 'user %s, process %s' % (user, pid)
1436 locker = 'user %s, process %s' % (user, pid)
1439 else:
1437 else:
1440 locker = 'user %s, process %s, host %s' \
1438 locker = 'user %s, process %s, host %s' \
1441 % (user, pid, host)
1439 % (user, pid, host)
1442 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1440 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1443 return 1
1441 return 1
1444 except OSError as e:
1442 except OSError as e:
1445 if e.errno != errno.ENOENT:
1443 if e.errno != errno.ENOENT:
1446 raise
1444 raise
1447
1445
1448 ui.write(("%-6s free\n") % (name + ":"))
1446 ui.write(("%-6s free\n") % (name + ":"))
1449 return 0
1447 return 0
1450
1448
1451 held += report(repo.svfs, "lock", repo.lock)
1449 held += report(repo.svfs, "lock", repo.lock)
1452 held += report(repo.vfs, "wlock", repo.wlock)
1450 held += report(repo.vfs, "wlock", repo.wlock)
1453
1451
1454 return held
1452 return held
1455
1453
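# Illustrative usage of debuglocks (a sketch; -L/-W are destructive and only
# appropriate for locks known to be stale):
#
#   $ hg debuglocks        # report lock/wlock holders, if any
#   $ hg debuglocks -s     # hold the store lock until interrupted
#   $ hg debuglocks -L     # force-free the store lock (DANGEROUS)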
1456 @command('debugmanifestfulltextcache', [
1454 @command('debugmanifestfulltextcache', [
1457 ('', 'clear', False, _('clear the cache')),
1455 ('', 'clear', False, _('clear the cache')),
1458 ('a', 'add', '', _('add the given manifest node to the cache'),
1456 ('a', 'add', '', _('add the given manifest node to the cache'),
1459 _('NODE'))
1457 _('NODE'))
1460 ], '')
1458 ], '')
1461 def debugmanifestfulltextcache(ui, repo, add=None, **opts):
1459 def debugmanifestfulltextcache(ui, repo, add=None, **opts):
1462 """show, clear or amend the contents of the manifest fulltext cache"""
1460 """show, clear or amend the contents of the manifest fulltext cache"""
1463 with repo.lock():
1461 with repo.lock():
1464 r = repo.manifestlog.getstorage(b'')
1462 r = repo.manifestlog.getstorage(b'')
1465 try:
1463 try:
1466 cache = r._fulltextcache
1464 cache = r._fulltextcache
1467 except AttributeError:
1465 except AttributeError:
1468 ui.warn(_(
1466 ui.warn(_(
1469 "Current revlog implementation doesn't appear to have a "
1467 "Current revlog implementation doesn't appear to have a "
1470 'manifest fulltext cache\n'))
1468 'manifest fulltext cache\n'))
1471 return
1469 return
1472
1470
1473 if opts.get(r'clear'):
1471 if opts.get(r'clear'):
1474 cache.clear()
1472 cache.clear()
1475
1473
1476 if add:
1474 if add:
1477 try:
1475 try:
1478 manifest = repo.manifestlog[r.lookup(add)]
1476 manifest = repo.manifestlog[r.lookup(add)]
1479 except error.LookupError as e:
1477 except error.LookupError as e:
1480 raise error.Abort(e, hint="Check your manifest node id")
1478 raise error.Abort(e, hint="Check your manifest node id")
1481 manifest.read() # stores revision in cache too
1479 manifest.read() # stores revision in cache too
1482
1480
1483 if not len(cache):
1481 if not len(cache):
1484 ui.write(_('Cache empty\n'))
1482 ui.write(_('Cache empty\n'))
1485 else:
1483 else:
1486 ui.write(
1484 ui.write(
1487 _('Cache contains %d manifest entries, in order of most to '
1485 _('Cache contains %d manifest entries, in order of most to '
1488 'least recent:\n') % (len(cache),))
1486 'least recent:\n') % (len(cache),))
1489 totalsize = 0
1487 totalsize = 0
1490 for nodeid in cache:
1488 for nodeid in cache:
1491 # Use cache.get to not update the LRU order
1489 # Use cache.get to not update the LRU order
1492 data = cache.get(nodeid)
1490 data = cache.get(nodeid)
1493 size = len(data)
1491 size = len(data)
1494 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1492 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1495 ui.write(_('id: %s, size %s\n') % (
1493 ui.write(_('id: %s, size %s\n') % (
1496 hex(nodeid), util.bytecount(size)))
1494 hex(nodeid), util.bytecount(size)))
1497 ondisk = cache._opener.stat('manifestfulltextcache').st_size
1495 ondisk = cache._opener.stat('manifestfulltextcache').st_size
1498 ui.write(
1496 ui.write(
1499 _('Total cache data size %s, on-disk %s\n') % (
1497 _('Total cache data size %s, on-disk %s\n') % (
1500 util.bytecount(totalsize), util.bytecount(ondisk))
1498 util.bytecount(totalsize), util.bytecount(ondisk))
1501 )
1499 )
1502
1500
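# Illustrative usage of debugmanifestfulltextcache (a sketch; NODE is a
# hypothetical full manifest node id):
#
#   $ hg debugmanifestfulltextcache             # list cached manifest fulltexts
#   $ hg debugmanifestfulltextcache --clear
#   $ hg debugmanifestfulltextcache --add NODE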
1503 @command('debugmergestate', [], '')
1501 @command('debugmergestate', [], '')
1504 def debugmergestate(ui, repo, *args):
1502 def debugmergestate(ui, repo, *args):
1505 """print merge state
1503 """print merge state
1506
1504
1507 Use --verbose to print out information about whether v1 or v2 merge state
1505 Use --verbose to print out information about whether v1 or v2 merge state
1508 was chosen."""
1506 was chosen."""
1509 def _hashornull(h):
1507 def _hashornull(h):
1510 if h == nullhex:
1508 if h == nullhex:
1511 return 'null'
1509 return 'null'
1512 else:
1510 else:
1513 return h
1511 return h
1514
1512
1515 def printrecords(version):
1513 def printrecords(version):
1516 ui.write(('* version %d records\n') % version)
1514 ui.write(('* version %d records\n') % version)
1517 if version == 1:
1515 if version == 1:
1518 records = v1records
1516 records = v1records
1519 else:
1517 else:
1520 records = v2records
1518 records = v2records
1521
1519
1522 for rtype, record in records:
1520 for rtype, record in records:
1523 # pretty print some record types
1521 # pretty print some record types
1524 if rtype == 'L':
1522 if rtype == 'L':
1525 ui.write(('local: %s\n') % record)
1523 ui.write(('local: %s\n') % record)
1526 elif rtype == 'O':
1524 elif rtype == 'O':
1527 ui.write(('other: %s\n') % record)
1525 ui.write(('other: %s\n') % record)
1528 elif rtype == 'm':
1526 elif rtype == 'm':
1529 driver, mdstate = record.split('\0', 1)
1527 driver, mdstate = record.split('\0', 1)
1530 ui.write(('merge driver: %s (state "%s")\n')
1528 ui.write(('merge driver: %s (state "%s")\n')
1531 % (driver, mdstate))
1529 % (driver, mdstate))
1532 elif rtype in 'FDC':
1530 elif rtype in 'FDC':
1533 r = record.split('\0')
1531 r = record.split('\0')
1534 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1532 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1535 if version == 1:
1533 if version == 1:
1536 onode = 'not stored in v1 format'
1534 onode = 'not stored in v1 format'
1537 flags = r[7]
1535 flags = r[7]
1538 else:
1536 else:
1539 onode, flags = r[7:9]
1537 onode, flags = r[7:9]
1540 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1538 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1541 % (f, rtype, state, _hashornull(hash)))
1539 % (f, rtype, state, _hashornull(hash)))
1542 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1540 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1543 ui.write((' ancestor path: %s (node %s)\n')
1541 ui.write((' ancestor path: %s (node %s)\n')
1544 % (afile, _hashornull(anode)))
1542 % (afile, _hashornull(anode)))
1545 ui.write((' other path: %s (node %s)\n')
1543 ui.write((' other path: %s (node %s)\n')
1546 % (ofile, _hashornull(onode)))
1544 % (ofile, _hashornull(onode)))
1547 elif rtype == 'f':
1545 elif rtype == 'f':
1548 filename, rawextras = record.split('\0', 1)
1546 filename, rawextras = record.split('\0', 1)
1549 extras = rawextras.split('\0')
1547 extras = rawextras.split('\0')
1550 i = 0
1548 i = 0
1551 extrastrings = []
1549 extrastrings = []
1552 while i < len(extras):
1550 while i < len(extras):
1553 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1551 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1554 i += 2
1552 i += 2
1555
1553
1556 ui.write(('file extras: %s (%s)\n')
1554 ui.write(('file extras: %s (%s)\n')
1557 % (filename, ', '.join(extrastrings)))
1555 % (filename, ', '.join(extrastrings)))
1558 elif rtype == 'l':
1556 elif rtype == 'l':
1559 labels = record.split('\0', 2)
1557 labels = record.split('\0', 2)
1560 labels = [l for l in labels if len(l) > 0]
1558 labels = [l for l in labels if len(l) > 0]
1561 ui.write(('labels:\n'))
1559 ui.write(('labels:\n'))
1562 ui.write((' local: %s\n' % labels[0]))
1560 ui.write((' local: %s\n' % labels[0]))
1563 ui.write((' other: %s\n' % labels[1]))
1561 ui.write((' other: %s\n' % labels[1]))
1564 if len(labels) > 2:
1562 if len(labels) > 2:
1565 ui.write((' base: %s\n' % labels[2]))
1563 ui.write((' base: %s\n' % labels[2]))
1566 else:
1564 else:
1567 ui.write(('unrecognized entry: %s\t%s\n')
1565 ui.write(('unrecognized entry: %s\t%s\n')
1568 % (rtype, record.replace('\0', '\t')))
1566 % (rtype, record.replace('\0', '\t')))
1569
1567
1570 # Avoid mergestate.read() since it may raise an exception for unsupported
1568 # Avoid mergestate.read() since it may raise an exception for unsupported
1571 # merge state records. We shouldn't be doing this, but this is OK since this
1569 # merge state records. We shouldn't be doing this, but this is OK since this
1572 # command is pretty low-level.
1570 # command is pretty low-level.
1573 ms = mergemod.mergestate(repo)
1571 ms = mergemod.mergestate(repo)
1574
1572
1575 # sort so that reasonable information is on top
1573 # sort so that reasonable information is on top
1576 v1records = ms._readrecordsv1()
1574 v1records = ms._readrecordsv1()
1577 v2records = ms._readrecordsv2()
1575 v2records = ms._readrecordsv2()
1578 order = 'LOml'
1576 order = 'LOml'
1579 def key(r):
1577 def key(r):
1580 idx = order.find(r[0])
1578 idx = order.find(r[0])
1581 if idx == -1:
1579 if idx == -1:
1582 return (1, r[1])
1580 return (1, r[1])
1583 else:
1581 else:
1584 return (0, idx)
1582 return (0, idx)
1585 v1records.sort(key=key)
1583 v1records.sort(key=key)
1586 v2records.sort(key=key)
1584 v2records.sort(key=key)
1587
1585
1588 if not v1records and not v2records:
1586 if not v1records and not v2records:
1589 ui.write(('no merge state found\n'))
1587 ui.write(('no merge state found\n'))
1590 elif not v2records:
1588 elif not v2records:
1591 ui.note(('no version 2 merge state\n'))
1589 ui.note(('no version 2 merge state\n'))
1592 printrecords(1)
1590 printrecords(1)
1593 elif ms._v1v2match(v1records, v2records):
1591 elif ms._v1v2match(v1records, v2records):
1594 ui.note(('v1 and v2 states match: using v2\n'))
1592 ui.note(('v1 and v2 states match: using v2\n'))
1595 printrecords(2)
1593 printrecords(2)
1596 else:
1594 else:
1597 ui.note(('v1 and v2 states mismatch: using v1\n'))
1595 ui.note(('v1 and v2 states mismatch: using v1\n'))
1598 printrecords(1)
1596 printrecords(1)
1599 if ui.verbose:
1597 if ui.verbose:
1600 printrecords(2)
1598 printrecords(2)
1601
1599
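# Illustrative usage of debugmergestate (a sketch; most useful while a merge
# with unresolved conflicts is in progress):
#
#   $ hg debugmergestate
#   $ hg debugmergestate --verbose   # also report whether v1 or v2 records won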
1602 @command('debugnamecomplete', [], _('NAME...'))
1600 @command('debugnamecomplete', [], _('NAME...'))
1603 def debugnamecomplete(ui, repo, *args):
1601 def debugnamecomplete(ui, repo, *args):
1604 '''complete "names" - tags, open branch names, bookmark names'''
1602 '''complete "names" - tags, open branch names, bookmark names'''
1605
1603
1606 names = set()
1604 names = set()
1607 # since we previously only listed open branches, we will handle that
1605 # since we previously only listed open branches, we will handle that
1608 # specially (after this for loop)
1606 # specially (after this for loop)
1609 for name, ns in repo.names.iteritems():
1607 for name, ns in repo.names.iteritems():
1610 if name != 'branches':
1608 if name != 'branches':
1611 names.update(ns.listnames(repo))
1609 names.update(ns.listnames(repo))
1612 names.update(tag for (tag, heads, tip, closed)
1610 names.update(tag for (tag, heads, tip, closed)
1613 in repo.branchmap().iterbranches() if not closed)
1611 in repo.branchmap().iterbranches() if not closed)
1614 completions = set()
1612 completions = set()
1615 if not args:
1613 if not args:
1616 args = ['']
1614 args = ['']
1617 for a in args:
1615 for a in args:
1618 completions.update(n for n in names if n.startswith(a))
1616 completions.update(n for n in names if n.startswith(a))
1619 ui.write('\n'.join(sorted(completions)))
1617 ui.write('\n'.join(sorted(completions)))
1620 ui.write('\n')
1618 ui.write('\n')
1621
1619
1622 @command('debugobsolete',
1620 @command('debugobsolete',
1623 [('', 'flags', 0, _('markers flag')),
1621 [('', 'flags', 0, _('markers flag')),
1624 ('', 'record-parents', False,
1622 ('', 'record-parents', False,
1625 _('record parent information for the precursor')),
1623 _('record parent information for the precursor')),
1626 ('r', 'rev', [], _('display markers relevant to REV')),
1624 ('r', 'rev', [], _('display markers relevant to REV')),
1627 ('', 'exclusive', False, _('restrict display to markers only '
1625 ('', 'exclusive', False, _('restrict display to markers only '
1628 'relevant to REV')),
1626 'relevant to REV')),
1629 ('', 'index', False, _('display index of the marker')),
1627 ('', 'index', False, _('display index of the marker')),
1630 ('', 'delete', [], _('delete markers specified by indices')),
1628 ('', 'delete', [], _('delete markers specified by indices')),
1631 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1629 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1632 _('[OBSOLETED [REPLACEMENT ...]]'))
1630 _('[OBSOLETED [REPLACEMENT ...]]'))
1633 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1631 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1634 """create arbitrary obsolete marker
1632 """create arbitrary obsolete marker
1635
1633
1636 With no arguments, displays the list of obsolescence markers."""
1634 With no arguments, displays the list of obsolescence markers."""
1637
1635
1638 opts = pycompat.byteskwargs(opts)
1636 opts = pycompat.byteskwargs(opts)
1639
1637
1640 def parsenodeid(s):
1638 def parsenodeid(s):
1641 try:
1639 try:
1642 # We do not use revsingle/revrange functions here to accept
1640 # We do not use revsingle/revrange functions here to accept
1643 # arbitrary node identifiers, possibly not present in the
1641 # arbitrary node identifiers, possibly not present in the
1644 # local repository.
1642 # local repository.
1645 n = bin(s)
1643 n = bin(s)
1646 if len(n) != len(nullid):
1644 if len(n) != len(nullid):
1647 raise TypeError()
1645 raise TypeError()
1648 return n
1646 return n
1649 except TypeError:
1647 except TypeError:
1650 raise error.Abort('changeset references must be full hexadecimal '
1648 raise error.Abort('changeset references must be full hexadecimal '
1651 'node identifiers')
1649 'node identifiers')
1652
1650
1653 if opts.get('delete'):
1651 if opts.get('delete'):
1654 indices = []
1652 indices = []
1655 for v in opts.get('delete'):
1653 for v in opts.get('delete'):
1656 try:
1654 try:
1657 indices.append(int(v))
1655 indices.append(int(v))
1658 except ValueError:
1656 except ValueError:
1659 raise error.Abort(_('invalid index value: %r') % v,
1657 raise error.Abort(_('invalid index value: %r') % v,
1660 hint=_('use integers for indices'))
1658 hint=_('use integers for indices'))
1661
1659
1662 if repo.currenttransaction():
1660 if repo.currenttransaction():
1663 raise error.Abort(_('cannot delete obsmarkers in the middle '
1661 raise error.Abort(_('cannot delete obsmarkers in the middle '
1664 'of a transaction.'))
1662 'of a transaction.'))
1665
1663
1666 with repo.lock():
1664 with repo.lock():
1667 n = repair.deleteobsmarkers(repo.obsstore, indices)
1665 n = repair.deleteobsmarkers(repo.obsstore, indices)
1668 ui.write(_('deleted %i obsolescence markers\n') % n)
1666 ui.write(_('deleted %i obsolescence markers\n') % n)
1669
1667
1670 return
1668 return
1671
1669
1672 if precursor is not None:
1670 if precursor is not None:
1673 if opts['rev']:
1671 if opts['rev']:
1674 raise error.Abort('cannot select revision when creating marker')
1672 raise error.Abort('cannot select revision when creating marker')
1675 metadata = {}
1673 metadata = {}
1676 metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
1674 metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
1677 succs = tuple(parsenodeid(succ) for succ in successors)
1675 succs = tuple(parsenodeid(succ) for succ in successors)
1678 l = repo.lock()
1676 l = repo.lock()
1679 try:
1677 try:
1680 tr = repo.transaction('debugobsolete')
1678 tr = repo.transaction('debugobsolete')
1681 try:
1679 try:
1682 date = opts.get('date')
1680 date = opts.get('date')
1683 if date:
1681 if date:
1684 date = dateutil.parsedate(date)
1682 date = dateutil.parsedate(date)
1685 else:
1683 else:
1686 date = None
1684 date = None
1687 prec = parsenodeid(precursor)
1685 prec = parsenodeid(precursor)
1688 parents = None
1686 parents = None
1689 if opts['record_parents']:
1687 if opts['record_parents']:
1690 if prec not in repo.unfiltered():
1688 if prec not in repo.unfiltered():
1691 raise error.Abort('cannot use --record-parents on '
1689 raise error.Abort('cannot use --record-parents on '
1692 'unknown changesets')
1690 'unknown changesets')
1693 parents = repo.unfiltered()[prec].parents()
1691 parents = repo.unfiltered()[prec].parents()
1694 parents = tuple(p.node() for p in parents)
1692 parents = tuple(p.node() for p in parents)
1695 repo.obsstore.create(tr, prec, succs, opts['flags'],
1693 repo.obsstore.create(tr, prec, succs, opts['flags'],
1696 parents=parents, date=date,
1694 parents=parents, date=date,
1697 metadata=metadata, ui=ui)
1695 metadata=metadata, ui=ui)
1698 tr.close()
1696 tr.close()
1699 except ValueError as exc:
1697 except ValueError as exc:
1700 raise error.Abort(_('bad obsmarker input: %s') %
1698 raise error.Abort(_('bad obsmarker input: %s') %
1701 pycompat.bytestr(exc))
1699 pycompat.bytestr(exc))
1702 finally:
1700 finally:
1703 tr.release()
1701 tr.release()
1704 finally:
1702 finally:
1705 l.release()
1703 l.release()
1706 else:
1704 else:
1707 if opts['rev']:
1705 if opts['rev']:
1708 revs = scmutil.revrange(repo, opts['rev'])
1706 revs = scmutil.revrange(repo, opts['rev'])
1709 nodes = [repo[r].node() for r in revs]
1707 nodes = [repo[r].node() for r in revs]
1710 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1708 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1711 exclusive=opts['exclusive']))
1709 exclusive=opts['exclusive']))
1712 markers.sort(key=lambda x: x._data)
1710 markers.sort(key=lambda x: x._data)
1713 else:
1711 else:
1714 markers = obsutil.getmarkers(repo)
1712 markers = obsutil.getmarkers(repo)
1715
1713
1716 markerstoiter = markers
1714 markerstoiter = markers
1717 isrelevant = lambda m: True
1715 isrelevant = lambda m: True
1718 if opts.get('rev') and opts.get('index'):
1716 if opts.get('rev') and opts.get('index'):
1719 markerstoiter = obsutil.getmarkers(repo)
1717 markerstoiter = obsutil.getmarkers(repo)
1720 markerset = set(markers)
1718 markerset = set(markers)
1721 isrelevant = lambda m: m in markerset
1719 isrelevant = lambda m: m in markerset
1722
1720
1723 fm = ui.formatter('debugobsolete', opts)
1721 fm = ui.formatter('debugobsolete', opts)
1724 for i, m in enumerate(markerstoiter):
1722 for i, m in enumerate(markerstoiter):
1725 if not isrelevant(m):
1723 if not isrelevant(m):
1726 # marker can be irrelevant when we're iterating over a set
1724 # marker can be irrelevant when we're iterating over a set
1727 # of markers (markerstoiter) which is bigger than the set
1725 # of markers (markerstoiter) which is bigger than the set
1728 # of markers we want to display (markers)
1726 # of markers we want to display (markers)
1729 # this can happen if both --index and --rev options are
1727 # this can happen if both --index and --rev options are
1730 # provided and thus we need to iterate over all of the markers
1728 # provided and thus we need to iterate over all of the markers
1731 # to get the correct indices, but only display the ones that
1729 # to get the correct indices, but only display the ones that
1732 # are relevant to --rev value
1730 # are relevant to --rev value
1733 continue
1731 continue
1734 fm.startitem()
1732 fm.startitem()
1735 ind = i if opts.get('index') else None
1733 ind = i if opts.get('index') else None
1736 cmdutil.showmarker(fm, m, index=ind)
1734 cmdutil.showmarker(fm, m, index=ind)
1737 fm.end()
1735 fm.end()
1738
1736
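# Illustrative usage of debugobsolete (a sketch; when creating a marker the
# node ids must be full 40-digit hexadecimal, placeholders shown here):
#
#   $ hg debugobsolete                      # list all markers
#   $ hg debugobsolete --rev . --index      # markers relevant to the wdir parent
#   $ hg debugobsolete <old-node> <new-node>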
1739 @command('debugpathcomplete',
1737 @command('debugpathcomplete',
1740 [('f', 'full', None, _('complete an entire path')),
1738 [('f', 'full', None, _('complete an entire path')),
1741 ('n', 'normal', None, _('show only normal files')),
1739 ('n', 'normal', None, _('show only normal files')),
1742 ('a', 'added', None, _('show only added files')),
1740 ('a', 'added', None, _('show only added files')),
1743 ('r', 'removed', None, _('show only removed files'))],
1741 ('r', 'removed', None, _('show only removed files'))],
1744 _('FILESPEC...'))
1742 _('FILESPEC...'))
1745 def debugpathcomplete(ui, repo, *specs, **opts):
1743 def debugpathcomplete(ui, repo, *specs, **opts):
1746 '''complete part or all of a tracked path
1744 '''complete part or all of a tracked path
1747
1745
1748 This command supports shells that offer path name completion. It
1746 This command supports shells that offer path name completion. It
1749 currently completes only files already known to the dirstate.
1747 currently completes only files already known to the dirstate.
1750
1748
1751 Completion extends only to the next path segment unless
1749 Completion extends only to the next path segment unless
1752 --full is specified, in which case entire paths are used.'''
1750 --full is specified, in which case entire paths are used.'''
1753
1751
1754 def complete(path, acceptable):
1752 def complete(path, acceptable):
1755 dirstate = repo.dirstate
1753 dirstate = repo.dirstate
1756 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1754 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1757 rootdir = repo.root + pycompat.ossep
1755 rootdir = repo.root + pycompat.ossep
1758 if spec != repo.root and not spec.startswith(rootdir):
1756 if spec != repo.root and not spec.startswith(rootdir):
1759 return [], []
1757 return [], []
1760 if os.path.isdir(spec):
1758 if os.path.isdir(spec):
1761 spec += '/'
1759 spec += '/'
1762 spec = spec[len(rootdir):]
1760 spec = spec[len(rootdir):]
1763 fixpaths = pycompat.ossep != '/'
1761 fixpaths = pycompat.ossep != '/'
1764 if fixpaths:
1762 if fixpaths:
1765 spec = spec.replace(pycompat.ossep, '/')
1763 spec = spec.replace(pycompat.ossep, '/')
1766 speclen = len(spec)
1764 speclen = len(spec)
1767 fullpaths = opts[r'full']
1765 fullpaths = opts[r'full']
1768 files, dirs = set(), set()
1766 files, dirs = set(), set()
1769 adddir, addfile = dirs.add, files.add
1767 adddir, addfile = dirs.add, files.add
1770 for f, st in dirstate.iteritems():
1768 for f, st in dirstate.iteritems():
1771 if f.startswith(spec) and st[0] in acceptable:
1769 if f.startswith(spec) and st[0] in acceptable:
1772 if fixpaths:
1770 if fixpaths:
1773 f = f.replace('/', pycompat.ossep)
1771 f = f.replace('/', pycompat.ossep)
1774 if fullpaths:
1772 if fullpaths:
1775 addfile(f)
1773 addfile(f)
1776 continue
1774 continue
1777 s = f.find(pycompat.ossep, speclen)
1775 s = f.find(pycompat.ossep, speclen)
1778 if s >= 0:
1776 if s >= 0:
1779 adddir(f[:s])
1777 adddir(f[:s])
1780 else:
1778 else:
1781 addfile(f)
1779 addfile(f)
1782 return files, dirs
1780 return files, dirs
1783
1781
1784 acceptable = ''
1782 acceptable = ''
1785 if opts[r'normal']:
1783 if opts[r'normal']:
1786 acceptable += 'nm'
1784 acceptable += 'nm'
1787 if opts[r'added']:
1785 if opts[r'added']:
1788 acceptable += 'a'
1786 acceptable += 'a'
1789 if opts[r'removed']:
1787 if opts[r'removed']:
1790 acceptable += 'r'
1788 acceptable += 'r'
1791 cwd = repo.getcwd()
1789 cwd = repo.getcwd()
1792 if not specs:
1790 if not specs:
1793 specs = ['.']
1791 specs = ['.']
1794
1792
1795 files, dirs = set(), set()
1793 files, dirs = set(), set()
1796 for spec in specs:
1794 for spec in specs:
1797 f, d = complete(spec, acceptable or 'nmar')
1795 f, d = complete(spec, acceptable or 'nmar')
1798 files.update(f)
1796 files.update(f)
1799 dirs.update(d)
1797 dirs.update(d)
1800 files.update(dirs)
1798 files.update(dirs)
1801 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1799 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1802 ui.write('\n')
1800 ui.write('\n')
1803
1801
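# A minimal standalone sketch (not part of this module) of the completion
# logic in debugpathcomplete above: for every tracked path that starts with
# the typed prefix, either keep the whole path (--full) or cut it at the
# next '/' so the shell only sees the next path segment. The helper name
# `complete_segments` is hypothetical.
def complete_segments(tracked, prefix, full=False):
    files, dirs = set(), set()
    for path in tracked:
        if not path.startswith(prefix):
            continue
        if full:
            files.add(path)
            continue
        cut = path.find('/', len(prefix))
        if cut >= 0:
            dirs.add(path[:cut])      # complete only up to the next segment
        else:
            files.add(path)
    return sorted(files | dirs)

# Example: complete_segments({'src/a.py', 'src/lib/b.py', 'README'}, 'src/')
# -> ['src/a.py', 'src/lib']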
1804 @command('debugpeer', [], _('PATH'), norepo=True)
1802 @command('debugpeer', [], _('PATH'), norepo=True)
1805 def debugpeer(ui, path):
1803 def debugpeer(ui, path):
1806 """establish a connection to a peer repository"""
1804 """establish a connection to a peer repository"""
1807 # Always enable peer request logging. Requires --debug to display
1805 # Always enable peer request logging. Requires --debug to display
1808 # though.
1806 # though.
1809 overrides = {
1807 overrides = {
1810 ('devel', 'debug.peer-request'): True,
1808 ('devel', 'debug.peer-request'): True,
1811 }
1809 }
1812
1810
1813 with ui.configoverride(overrides):
1811 with ui.configoverride(overrides):
1814 peer = hg.peer(ui, {}, path)
1812 peer = hg.peer(ui, {}, path)
1815
1813
1816 local = peer.local() is not None
1814 local = peer.local() is not None
1817 canpush = peer.canpush()
1815 canpush = peer.canpush()
1818
1816
1819 ui.write(_('url: %s\n') % peer.url())
1817 ui.write(_('url: %s\n') % peer.url())
1820 ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
1818 ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
1821 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
1819 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
1822
1820
1823 @command('debugpickmergetool',
1821 @command('debugpickmergetool',
1824 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1822 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1825 ('', 'changedelete', None, _('emulate merging change and delete')),
1823 ('', 'changedelete', None, _('emulate merging change and delete')),
1826 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1824 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1827 _('[PATTERN]...'),
1825 _('[PATTERN]...'),
1828 inferrepo=True)
1826 inferrepo=True)
1829 def debugpickmergetool(ui, repo, *pats, **opts):
1827 def debugpickmergetool(ui, repo, *pats, **opts):
1830 """examine which merge tool is chosen for specified file
1828 """examine which merge tool is chosen for specified file
1831
1829
1832 As described in :hg:`help merge-tools`, Mercurial examines
1830 As described in :hg:`help merge-tools`, Mercurial examines
1833 configurations below in this order to decide which merge tool is
1831 configurations below in this order to decide which merge tool is
1834 chosen for specified file.
1832 chosen for specified file.
1835
1833
1836 1. ``--tool`` option
1834 1. ``--tool`` option
1837 2. ``HGMERGE`` environment variable
1835 2. ``HGMERGE`` environment variable
1838 3. configurations in ``merge-patterns`` section
1836 3. configurations in ``merge-patterns`` section
1839 4. configuration of ``ui.merge``
1837 4. configuration of ``ui.merge``
1840 5. configurations in ``merge-tools`` section
1838 5. configurations in ``merge-tools`` section
1841 6. ``hgmerge`` tool (for historical reasons only)
1839 6. ``hgmerge`` tool (for historical reasons only)
1842 7. default tool for fallback (``:merge`` or ``:prompt``)
1840 7. default tool for fallback (``:merge`` or ``:prompt``)
1843
1841
1844 This command writes out examination result in the style below::
1842 This command writes out examination result in the style below::
1845
1843
1846 FILE = MERGETOOL
1844 FILE = MERGETOOL
1847
1845
1848 By default, all files known in the first parent context of the
1846 By default, all files known in the first parent context of the
1849 working directory are examined. Use file patterns and/or -I/-X
1847 working directory are examined. Use file patterns and/or -I/-X
1850 options to limit target files. -r/--rev is also useful to examine
1848 options to limit target files. -r/--rev is also useful to examine
1851 files in another context without actual updating to it.
1849 files in another context without actual updating to it.
1852
1850
1853 With --debug, this command shows warning messages while matching
1851 With --debug, this command shows warning messages while matching
1854 against ``merge-patterns`` and so on, too. It is recommended to
1852 against ``merge-patterns`` and so on, too. It is recommended to
1855 use this option with explicit file patterns and/or -I/-X options,
1853 use this option with explicit file patterns and/or -I/-X options,
1856 because this option increases the amount of output per file according
1854 because this option increases the amount of output per file according
1857 to configurations in hgrc.
1855 to configurations in hgrc.
1858
1856
1859 With -v/--verbose, this command shows configurations below at
1857 With -v/--verbose, this command shows configurations below at
1860 first (only if specified).
1858 first (only if specified).
1861
1859
1862 - ``--tool`` option
1860 - ``--tool`` option
1863 - ``HGMERGE`` environment variable
1861 - ``HGMERGE`` environment variable
1864 - configuration of ``ui.merge``
1862 - configuration of ``ui.merge``
1865
1863
1866 If merge tool is chosen before matching against
1864 If merge tool is chosen before matching against
1867 ``merge-patterns``, this command can't show any helpful
1865 ``merge-patterns``, this command can't show any helpful
1868 information, even with --debug. In such a case, the information above is
1866 information, even with --debug. In such a case, the information above is
1869 useful for knowing why a merge tool is chosen.
1867 useful for knowing why a merge tool is chosen.
1870 """
1868 """
1871 opts = pycompat.byteskwargs(opts)
1869 opts = pycompat.byteskwargs(opts)
1872 overrides = {}
1870 overrides = {}
1873 if opts['tool']:
1871 if opts['tool']:
1874 overrides[('ui', 'forcemerge')] = opts['tool']
1872 overrides[('ui', 'forcemerge')] = opts['tool']
1875 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1873 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1876
1874
1877 with ui.configoverride(overrides, 'debugmergepatterns'):
1875 with ui.configoverride(overrides, 'debugmergepatterns'):
1878 hgmerge = encoding.environ.get("HGMERGE")
1876 hgmerge = encoding.environ.get("HGMERGE")
1879 if hgmerge is not None:
1877 if hgmerge is not None:
1880 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1878 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1881 uimerge = ui.config("ui", "merge")
1879 uimerge = ui.config("ui", "merge")
1882 if uimerge:
1880 if uimerge:
1883 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1881 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1884
1882
1885 ctx = scmutil.revsingle(repo, opts.get('rev'))
1883 ctx = scmutil.revsingle(repo, opts.get('rev'))
1886 m = scmutil.match(ctx, pats, opts)
1884 m = scmutil.match(ctx, pats, opts)
1887 changedelete = opts['changedelete']
1885 changedelete = opts['changedelete']
1888 for path in ctx.walk(m):
1886 for path in ctx.walk(m):
1889 fctx = ctx[path]
1887 fctx = ctx[path]
1890 try:
1888 try:
1891 if not ui.debugflag:
1889 if not ui.debugflag:
1892 ui.pushbuffer(error=True)
1890 ui.pushbuffer(error=True)
1893 tool, toolpath = filemerge._picktool(repo, ui, path,
1891 tool, toolpath = filemerge._picktool(repo, ui, path,
1894 fctx.isbinary(),
1892 fctx.isbinary(),
1895 'l' in fctx.flags(),
1893 'l' in fctx.flags(),
1896 changedelete)
1894 changedelete)
1897 finally:
1895 finally:
1898 if not ui.debugflag:
1896 if not ui.debugflag:
1899 ui.popbuffer()
1897 ui.popbuffer()
1900 ui.write(('%s = %s\n') % (path, tool))
1898 ui.write(('%s = %s\n') % (path, tool))
1901
1899
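# Illustrative sketch only (not Mercurial's filemerge._picktool): the
# precedence documented in debugpickmergetool above boils down to "take the
# first configured source that yields a tool". `pick_first` and its
# `sources` argument are hypothetical names used only in this sketch.
def pick_first(sources, default=':prompt'):
    """sources is an ordered list of (name, value) pairs, e.g.
    [('--tool', None), ('HGMERGE', None), ('ui.merge', 'vimdiff')]."""
    for name, value in sources:
        if value:
            return name, value
    return 'fallback', default

# Example:
# pick_first([('--tool', None), ('HGMERGE', ''), ('ui.merge', 'vimdiff')])
# -> ('ui.merge', 'vimdiff')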
1902 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1900 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1903 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1901 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1904 '''access the pushkey key/value protocol
1902 '''access the pushkey key/value protocol
1905
1903
1906 With two args, list the keys in the given namespace.
1904 With two args, list the keys in the given namespace.
1907
1905
1908 With five args, set a key to new if it currently is set to old.
1906 With five args, set a key to new if it currently is set to old.
1909 Reports success or failure.
1907 Reports success or failure.
1910 '''
1908 '''
1911
1909
1912 target = hg.peer(ui, {}, repopath)
1910 target = hg.peer(ui, {}, repopath)
1913 if keyinfo:
1911 if keyinfo:
1914 key, old, new = keyinfo
1912 key, old, new = keyinfo
1915 with target.commandexecutor() as e:
1913 with target.commandexecutor() as e:
1916 r = e.callcommand('pushkey', {
1914 r = e.callcommand('pushkey', {
1917 'namespace': namespace,
1915 'namespace': namespace,
1918 'key': key,
1916 'key': key,
1919 'old': old,
1917 'old': old,
1920 'new': new,
1918 'new': new,
1921 }).result()
1919 }).result()
1922
1920
1923 ui.status(pycompat.bytestr(r) + '\n')
1921 ui.status(pycompat.bytestr(r) + '\n')
1924 return not r
1922 return not r
1925 else:
1923 else:
1926 for k, v in sorted(target.listkeys(namespace).iteritems()):
1924 for k, v in sorted(target.listkeys(namespace).iteritems()):
1927 ui.write("%s\t%s\n" % (stringutil.escapestr(k),
1925 ui.write("%s\t%s\n" % (stringutil.escapestr(k),
1928 stringutil.escapestr(v)))
1926 stringutil.escapestr(v)))
1929
1927
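# Hedged sketch, not the wire protocol implementation: debugpushkey's
# five-argument form is a compare-and-swap on a key/value namespace, while
# the two-argument form is a plain listing. The in-memory
# `PushkeyNamespace` class below is purely illustrative.
class PushkeyNamespace(object):
    def __init__(self):
        self._data = {}

    def listkeys(self):
        return dict(self._data)

    def pushkey(self, key, old, new):
        # succeed only if the key currently holds `old` ('' means "unset")
        current = self._data.get(key, '')
        if current != old:
            return False
        self._data[key] = new
        return True

# Example:
# ns = PushkeyNamespace()
# ns.pushkey('bookmarks/foo', '', 'abc123')   -> True
# ns.pushkey('bookmarks/foo', 'wrong', 'def') -> False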
1930 @command('debugpvec', [], _('A B'))
1928 @command('debugpvec', [], _('A B'))
1931 def debugpvec(ui, repo, a, b=None):
1929 def debugpvec(ui, repo, a, b=None):
1932 ca = scmutil.revsingle(repo, a)
1930 ca = scmutil.revsingle(repo, a)
1933 cb = scmutil.revsingle(repo, b)
1931 cb = scmutil.revsingle(repo, b)
1934 pa = pvec.ctxpvec(ca)
1932 pa = pvec.ctxpvec(ca)
1935 pb = pvec.ctxpvec(cb)
1933 pb = pvec.ctxpvec(cb)
1936 if pa == pb:
1934 if pa == pb:
1937 rel = "="
1935 rel = "="
1938 elif pa > pb:
1936 elif pa > pb:
1939 rel = ">"
1937 rel = ">"
1940 elif pa < pb:
1938 elif pa < pb:
1941 rel = "<"
1939 rel = "<"
1942 elif pa | pb:
1940 elif pa | pb:
1943 rel = "|"
1941 rel = "|"
1944 ui.write(_("a: %s\n") % pa)
1942 ui.write(_("a: %s\n") % pa)
1945 ui.write(_("b: %s\n") % pb)
1943 ui.write(_("b: %s\n") % pb)
1946 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1944 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1947 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1945 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1948 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1946 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1949 pa.distance(pb), rel))
1947 pa.distance(pb), rel))
1950
1948
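# Standalone sketch (assumption: pvec vectors behave like fixed-width bit
# vectors, which is how debugpvec above reports `hdist`): counting the
# differing bits of two integers gives the Hamming distance. `hamming` here
# is an illustrative helper, not mercurial.pvec._hamming.
def hamming(a, b):
    return bin(a ^ b).count('1')

# Example: hamming(0b1011, 0b1110) -> 2 (bits 0 and 2 differ)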
1951 @command('debugrebuilddirstate|debugrebuildstate',
1949 @command('debugrebuilddirstate|debugrebuildstate',
1952 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1950 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1953 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1951 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1954 'the working copy parent')),
1952 'the working copy parent')),
1955 ],
1953 ],
1956 _('[-r REV]'))
1954 _('[-r REV]'))
1957 def debugrebuilddirstate(ui, repo, rev, **opts):
1955 def debugrebuilddirstate(ui, repo, rev, **opts):
1958 """rebuild the dirstate as it would look like for the given revision
1956 """rebuild the dirstate as it would look like for the given revision
1959
1957
1960 If no revision is specified the first current parent will be used.
1958 If no revision is specified the first current parent will be used.
1961
1959
1962 The dirstate will be set to the files of the given revision.
1960 The dirstate will be set to the files of the given revision.
1963 The actual working directory content or existing dirstate
1961 The actual working directory content or existing dirstate
1964 information such as adds or removes is not considered.
1962 information such as adds or removes is not considered.
1965
1963
1966 ``minimal`` will only rebuild the dirstate status for files that claim to be
1964 ``minimal`` will only rebuild the dirstate status for files that claim to be
1967 tracked but are not in the parent manifest, or that exist in the parent
1965 tracked but are not in the parent manifest, or that exist in the parent
1968 manifest but are not in the dirstate. It will not change adds, removes, or
1966 manifest but are not in the dirstate. It will not change adds, removes, or
1969 modified files that are in the working copy parent.
1967 modified files that are in the working copy parent.
1970
1968
1971 One use of this command is to make the next :hg:`status` invocation
1969 One use of this command is to make the next :hg:`status` invocation
1972 check the actual file content.
1970 check the actual file content.
1973 """
1971 """
1974 ctx = scmutil.revsingle(repo, rev)
1972 ctx = scmutil.revsingle(repo, rev)
1975 with repo.wlock():
1973 with repo.wlock():
1976 dirstate = repo.dirstate
1974 dirstate = repo.dirstate
1977 changedfiles = None
1975 changedfiles = None
1978 # See command doc for what minimal does.
1976 # See command doc for what minimal does.
1979 if opts.get(r'minimal'):
1977 if opts.get(r'minimal'):
1980 manifestfiles = set(ctx.manifest().keys())
1978 manifestfiles = set(ctx.manifest().keys())
1981 dirstatefiles = set(dirstate)
1979 dirstatefiles = set(dirstate)
1982 manifestonly = manifestfiles - dirstatefiles
1980 manifestonly = manifestfiles - dirstatefiles
1983 dsonly = dirstatefiles - manifestfiles
1981 dsonly = dirstatefiles - manifestfiles
1984 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1982 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1985 changedfiles = manifestonly | dsnotadded
1983 changedfiles = manifestonly | dsnotadded
1986
1984
1987 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1985 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1988
1986
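# Minimal sketch of the --minimal selection above (illustrative names only):
# the files worth rebuilding are those in the manifest but missing from the
# dirstate, plus dirstate entries that are neither in the manifest nor
# marked as added.
def minimal_changed_files(manifestfiles, dirstate):
    # dirstate maps filename -> one-letter status ('n', 'a', 'r', 'm', ...)
    manifestonly = set(manifestfiles) - set(dirstate)
    dsonly = set(dirstate) - set(manifestfiles)
    dsnotadded = {f for f in dsonly if dirstate[f] != 'a'}
    return manifestonly | dsnotadded

# Example:
# minimal_changed_files({'a', 'b'}, {'b': 'n', 'c': 'a', 'd': 'n'})
# -> {'a', 'd'}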
1989 @command('debugrebuildfncache', [], '')
1987 @command('debugrebuildfncache', [], '')
1990 def debugrebuildfncache(ui, repo):
1988 def debugrebuildfncache(ui, repo):
1991 """rebuild the fncache file"""
1989 """rebuild the fncache file"""
1992 repair.rebuildfncache(ui, repo)
1990 repair.rebuildfncache(ui, repo)
1993
1991
1994 @command('debugrename',
1992 @command('debugrename',
1995 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1993 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1996 _('[-r REV] FILE'))
1994 _('[-r REV] FILE'))
1997 def debugrename(ui, repo, file1, *pats, **opts):
1995 def debugrename(ui, repo, file1, *pats, **opts):
1998 """dump rename information"""
1996 """dump rename information"""
1999
1997
2000 opts = pycompat.byteskwargs(opts)
1998 opts = pycompat.byteskwargs(opts)
2001 ctx = scmutil.revsingle(repo, opts.get('rev'))
1999 ctx = scmutil.revsingle(repo, opts.get('rev'))
2002 m = scmutil.match(ctx, (file1,) + pats, opts)
2000 m = scmutil.match(ctx, (file1,) + pats, opts)
2003 for abs in ctx.walk(m):
2001 for abs in ctx.walk(m):
2004 fctx = ctx[abs]
2002 fctx = ctx[abs]
2005 o = fctx.filelog().renamed(fctx.filenode())
2003 o = fctx.filelog().renamed(fctx.filenode())
2006 rel = m.rel(abs)
2004 rel = m.rel(abs)
2007 if o:
2005 if o:
2008 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2006 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2009 else:
2007 else:
2010 ui.write(_("%s not renamed\n") % rel)
2008 ui.write(_("%s not renamed\n") % rel)
2011
2009
2012 @command('debugrevlog', cmdutil.debugrevlogopts +
2010 @command('debugrevlog', cmdutil.debugrevlogopts +
2013 [('d', 'dump', False, _('dump index data'))],
2011 [('d', 'dump', False, _('dump index data'))],
2014 _('-c|-m|FILE'),
2012 _('-c|-m|FILE'),
2015 optionalrepo=True)
2013 optionalrepo=True)
2016 def debugrevlog(ui, repo, file_=None, **opts):
2014 def debugrevlog(ui, repo, file_=None, **opts):
2017 """show data and statistics about a revlog"""
2015 """show data and statistics about a revlog"""
2018 opts = pycompat.byteskwargs(opts)
2016 opts = pycompat.byteskwargs(opts)
2019 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
2017 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
2020
2018
2021 if opts.get("dump"):
2019 if opts.get("dump"):
2022 numrevs = len(r)
2020 numrevs = len(r)
2023 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
2021 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
2024 " rawsize totalsize compression heads chainlen\n"))
2022 " rawsize totalsize compression heads chainlen\n"))
2025 ts = 0
2023 ts = 0
2026 heads = set()
2024 heads = set()
2027
2025
2028 for rev in pycompat.xrange(numrevs):
2026 for rev in pycompat.xrange(numrevs):
2029 dbase = r.deltaparent(rev)
2027 dbase = r.deltaparent(rev)
2030 if dbase == -1:
2028 if dbase == -1:
2031 dbase = rev
2029 dbase = rev
2032 cbase = r.chainbase(rev)
2030 cbase = r.chainbase(rev)
2033 clen = r.chainlen(rev)
2031 clen = r.chainlen(rev)
2034 p1, p2 = r.parentrevs(rev)
2032 p1, p2 = r.parentrevs(rev)
2035 rs = r.rawsize(rev)
2033 rs = r.rawsize(rev)
2036 ts = ts + rs
2034 ts = ts + rs
2037 heads -= set(r.parentrevs(rev))
2035 heads -= set(r.parentrevs(rev))
2038 heads.add(rev)
2036 heads.add(rev)
2039 try:
2037 try:
2040 compression = ts / r.end(rev)
2038 compression = ts / r.end(rev)
2041 except ZeroDivisionError:
2039 except ZeroDivisionError:
2042 compression = 0
2040 compression = 0
2043 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2041 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2044 "%11d %5d %8d\n" %
2042 "%11d %5d %8d\n" %
2045 (rev, p1, p2, r.start(rev), r.end(rev),
2043 (rev, p1, p2, r.start(rev), r.end(rev),
2046 r.start(dbase), r.start(cbase),
2044 r.start(dbase), r.start(cbase),
2047 r.start(p1), r.start(p2),
2045 r.start(p1), r.start(p2),
2048 rs, ts, compression, len(heads), clen))
2046 rs, ts, compression, len(heads), clen))
2049 return 0
2047 return 0
2050
2048
2051 v = r.version
2049 v = r.version
2052 format = v & 0xFFFF
2050 format = v & 0xFFFF
2053 flags = []
2051 flags = []
2054 gdelta = False
2052 gdelta = False
2055 if v & revlog.FLAG_INLINE_DATA:
2053 if v & revlog.FLAG_INLINE_DATA:
2056 flags.append('inline')
2054 flags.append('inline')
2057 if v & revlog.FLAG_GENERALDELTA:
2055 if v & revlog.FLAG_GENERALDELTA:
2058 gdelta = True
2056 gdelta = True
2059 flags.append('generaldelta')
2057 flags.append('generaldelta')
2060 if not flags:
2058 if not flags:
2061 flags = ['(none)']
2059 flags = ['(none)']
2062
2060
2063 ### tracks merge vs single parent
2061 ### tracks merge vs single parent
2064 nummerges = 0
2062 nummerges = 0
2065
2063
2066 ### tracks the ways the deltas are built
2064 ### tracks the ways the deltas are built
2067 # nodelta
2065 # nodelta
2068 numempty = 0
2066 numempty = 0
2069 numemptytext = 0
2067 numemptytext = 0
2070 numemptydelta = 0
2068 numemptydelta = 0
2071 # full file content
2069 # full file content
2072 numfull = 0
2070 numfull = 0
2073 # intermediate snapshot against a prior snapshot
2071 # intermediate snapshot against a prior snapshot
2074 numsemi = 0
2072 numsemi = 0
2075 # snapshot count per depth
2073 # snapshot count per depth
2076 numsnapdepth = collections.defaultdict(lambda: 0)
2074 numsnapdepth = collections.defaultdict(lambda: 0)
2077 # delta against previous revision
2075 # delta against previous revision
2078 numprev = 0
2076 numprev = 0
2079 # delta against first or second parent (not prev)
2077 # delta against first or second parent (not prev)
2080 nump1 = 0
2078 nump1 = 0
2081 nump2 = 0
2079 nump2 = 0
2082 # delta against neither prev nor parents
2080 # delta against neither prev nor parents
2083 numother = 0
2081 numother = 0
2084 # delta against prev that are also first or second parent
2082 # delta against prev that are also first or second parent
2085 # (details of `numprev`)
2083 # (details of `numprev`)
2086 nump1prev = 0
2084 nump1prev = 0
2087 nump2prev = 0
2085 nump2prev = 0
2088
2086
2089 # data about the delta chain of each rev
2087 # data about the delta chain of each rev
2090 chainlengths = []
2088 chainlengths = []
2091 chainbases = []
2089 chainbases = []
2092 chainspans = []
2090 chainspans = []
2093
2091
2094 # data about each revision
2092 # data about each revision
2095 datasize = [None, 0, 0]
2093 datasize = [None, 0, 0]
2096 fullsize = [None, 0, 0]
2094 fullsize = [None, 0, 0]
2097 semisize = [None, 0, 0]
2095 semisize = [None, 0, 0]
2098 # snapshot count per depth
2096 # snapshot count per depth
2099 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2097 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2100 deltasize = [None, 0, 0]
2098 deltasize = [None, 0, 0]
2101 chunktypecounts = {}
2099 chunktypecounts = {}
2102 chunktypesizes = {}
2100 chunktypesizes = {}
2103
2101
2104 def addsize(size, l):
2102 def addsize(size, l):
2105 if l[0] is None or size < l[0]:
2103 if l[0] is None or size < l[0]:
2106 l[0] = size
2104 l[0] = size
2107 if size > l[1]:
2105 if size > l[1]:
2108 l[1] = size
2106 l[1] = size
2109 l[2] += size
2107 l[2] += size
2110
2108
2111 numrevs = len(r)
2109 numrevs = len(r)
2112 for rev in pycompat.xrange(numrevs):
2110 for rev in pycompat.xrange(numrevs):
2113 p1, p2 = r.parentrevs(rev)
2111 p1, p2 = r.parentrevs(rev)
2114 delta = r.deltaparent(rev)
2112 delta = r.deltaparent(rev)
2115 if format > 0:
2113 if format > 0:
2116 addsize(r.rawsize(rev), datasize)
2114 addsize(r.rawsize(rev), datasize)
2117 if p2 != nullrev:
2115 if p2 != nullrev:
2118 nummerges += 1
2116 nummerges += 1
2119 size = r.length(rev)
2117 size = r.length(rev)
2120 if delta == nullrev:
2118 if delta == nullrev:
2121 chainlengths.append(0)
2119 chainlengths.append(0)
2122 chainbases.append(r.start(rev))
2120 chainbases.append(r.start(rev))
2123 chainspans.append(size)
2121 chainspans.append(size)
2124 if size == 0:
2122 if size == 0:
2125 numempty += 1
2123 numempty += 1
2126 numemptytext += 1
2124 numemptytext += 1
2127 else:
2125 else:
2128 numfull += 1
2126 numfull += 1
2129 numsnapdepth[0] += 1
2127 numsnapdepth[0] += 1
2130 addsize(size, fullsize)
2128 addsize(size, fullsize)
2131 addsize(size, snapsizedepth[0])
2129 addsize(size, snapsizedepth[0])
2132 else:
2130 else:
2133 chainlengths.append(chainlengths[delta] + 1)
2131 chainlengths.append(chainlengths[delta] + 1)
2134 baseaddr = chainbases[delta]
2132 baseaddr = chainbases[delta]
2135 revaddr = r.start(rev)
2133 revaddr = r.start(rev)
2136 chainbases.append(baseaddr)
2134 chainbases.append(baseaddr)
2137 chainspans.append((revaddr - baseaddr) + size)
2135 chainspans.append((revaddr - baseaddr) + size)
2138 if size == 0:
2136 if size == 0:
2139 numempty += 1
2137 numempty += 1
2140 numemptydelta += 1
2138 numemptydelta += 1
2141 elif r.issnapshot(rev):
2139 elif r.issnapshot(rev):
2142 addsize(size, semisize)
2140 addsize(size, semisize)
2143 numsemi += 1
2141 numsemi += 1
2144 depth = r.snapshotdepth(rev)
2142 depth = r.snapshotdepth(rev)
2145 numsnapdepth[depth] += 1
2143 numsnapdepth[depth] += 1
2146 addsize(size, snapsizedepth[depth])
2144 addsize(size, snapsizedepth[depth])
2147 else:
2145 else:
2148 addsize(size, deltasize)
2146 addsize(size, deltasize)
2149 if delta == rev - 1:
2147 if delta == rev - 1:
2150 numprev += 1
2148 numprev += 1
2151 if delta == p1:
2149 if delta == p1:
2152 nump1prev += 1
2150 nump1prev += 1
2153 elif delta == p2:
2151 elif delta == p2:
2154 nump2prev += 1
2152 nump2prev += 1
2155 elif delta == p1:
2153 elif delta == p1:
2156 nump1 += 1
2154 nump1 += 1
2157 elif delta == p2:
2155 elif delta == p2:
2158 nump2 += 1
2156 nump2 += 1
2159 elif delta != nullrev:
2157 elif delta != nullrev:
2160 numother += 1
2158 numother += 1
2161
2159
2162 # Obtain data on the raw chunks in the revlog.
2160 # Obtain data on the raw chunks in the revlog.
2163 if util.safehasattr(r, '_getsegmentforrevs'):
2161 if util.safehasattr(r, '_getsegmentforrevs'):
2164 segment = r._getsegmentforrevs(rev, rev)[1]
2162 segment = r._getsegmentforrevs(rev, rev)[1]
2165 else:
2163 else:
2166 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2164 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2167 if segment:
2165 if segment:
2168 chunktype = bytes(segment[0:1])
2166 chunktype = bytes(segment[0:1])
2169 else:
2167 else:
2170 chunktype = 'empty'
2168 chunktype = 'empty'
2171
2169
2172 if chunktype not in chunktypecounts:
2170 if chunktype not in chunktypecounts:
2173 chunktypecounts[chunktype] = 0
2171 chunktypecounts[chunktype] = 0
2174 chunktypesizes[chunktype] = 0
2172 chunktypesizes[chunktype] = 0
2175
2173
2176 chunktypecounts[chunktype] += 1
2174 chunktypecounts[chunktype] += 1
2177 chunktypesizes[chunktype] += size
2175 chunktypesizes[chunktype] += size
2178
2176
2179 # Adjust size min value for empty cases
2177 # Adjust size min value for empty cases
2180 for size in (datasize, fullsize, semisize, deltasize):
2178 for size in (datasize, fullsize, semisize, deltasize):
2181 if size[0] is None:
2179 if size[0] is None:
2182 size[0] = 0
2180 size[0] = 0
2183
2181
2184 numdeltas = numrevs - numfull - numempty - numsemi
2182 numdeltas = numrevs - numfull - numempty - numsemi
2185 numoprev = numprev - nump1prev - nump2prev
2183 numoprev = numprev - nump1prev - nump2prev
2186 totalrawsize = datasize[2]
2184 totalrawsize = datasize[2]
2187 datasize[2] /= numrevs
2185 datasize[2] /= numrevs
2188 fulltotal = fullsize[2]
2186 fulltotal = fullsize[2]
2189 fullsize[2] /= numfull
2187 fullsize[2] /= numfull
2190 semitotal = semisize[2]
2188 semitotal = semisize[2]
2191 snaptotal = {}
2189 snaptotal = {}
2192 if 0 < numsemi:
2190 if 0 < numsemi:
2193 semisize[2] /= numsemi
2191 semisize[2] /= numsemi
2194 for depth in snapsizedepth:
2192 for depth in snapsizedepth:
2195 snaptotal[depth] = snapsizedepth[depth][2]
2193 snaptotal[depth] = snapsizedepth[depth][2]
2196 snapsizedepth[depth][2] /= numsnapdepth[depth]
2194 snapsizedepth[depth][2] /= numsnapdepth[depth]
2197
2195
2198 deltatotal = deltasize[2]
2196 deltatotal = deltasize[2]
2199 if numdeltas > 0:
2197 if numdeltas > 0:
2200 deltasize[2] /= numdeltas
2198 deltasize[2] /= numdeltas
2201 totalsize = fulltotal + semitotal + deltatotal
2199 totalsize = fulltotal + semitotal + deltatotal
2202 avgchainlen = sum(chainlengths) / numrevs
2200 avgchainlen = sum(chainlengths) / numrevs
2203 maxchainlen = max(chainlengths)
2201 maxchainlen = max(chainlengths)
2204 maxchainspan = max(chainspans)
2202 maxchainspan = max(chainspans)
2205 compratio = 1
2203 compratio = 1
2206 if totalsize:
2204 if totalsize:
2207 compratio = totalrawsize / totalsize
2205 compratio = totalrawsize / totalsize
2208
2206
2209 basedfmtstr = '%%%dd\n'
2207 basedfmtstr = '%%%dd\n'
2210 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2208 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2211
2209
2212 def dfmtstr(max):
2210 def dfmtstr(max):
2213 return basedfmtstr % len(str(max))
2211 return basedfmtstr % len(str(max))
2214 def pcfmtstr(max, padding=0):
2212 def pcfmtstr(max, padding=0):
2215 return basepcfmtstr % (len(str(max)), ' ' * padding)
2213 return basepcfmtstr % (len(str(max)), ' ' * padding)
2216
2214
2217 def pcfmt(value, total):
2215 def pcfmt(value, total):
2218 if total:
2216 if total:
2219 return (value, 100 * float(value) / total)
2217 return (value, 100 * float(value) / total)
2220 else:
2218 else:
2221 return value, 100.0
2219 return value, 100.0
2222
2220
2223 ui.write(('format : %d\n') % format)
2221 ui.write(('format : %d\n') % format)
2224 ui.write(('flags : %s\n') % ', '.join(flags))
2222 ui.write(('flags : %s\n') % ', '.join(flags))
2225
2223
2226 ui.write('\n')
2224 ui.write('\n')
2227 fmt = pcfmtstr(totalsize)
2225 fmt = pcfmtstr(totalsize)
2228 fmt2 = dfmtstr(totalsize)
2226 fmt2 = dfmtstr(totalsize)
2229 ui.write(('revisions : ') + fmt2 % numrevs)
2227 ui.write(('revisions : ') + fmt2 % numrevs)
2230 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2228 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2231 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2229 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2232 ui.write(('revisions : ') + fmt2 % numrevs)
2230 ui.write(('revisions : ') + fmt2 % numrevs)
2233 ui.write((' empty : ') + fmt % pcfmt(numempty, numrevs))
2231 ui.write((' empty : ') + fmt % pcfmt(numempty, numrevs))
2234 ui.write((' text : ')
2232 ui.write((' text : ')
2235 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta))
2233 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta))
2236 ui.write((' delta : ')
2234 ui.write((' delta : ')
2237 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta))
2235 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta))
2238 ui.write((' snapshot : ') + fmt % pcfmt(numfull + numsemi, numrevs))
2236 ui.write((' snapshot : ') + fmt % pcfmt(numfull + numsemi, numrevs))
2239 for depth in sorted(numsnapdepth):
2237 for depth in sorted(numsnapdepth):
2240 ui.write((' lvl-%-3d : ' % depth)
2238 ui.write((' lvl-%-3d : ' % depth)
2241 + fmt % pcfmt(numsnapdepth[depth], numrevs))
2239 + fmt % pcfmt(numsnapdepth[depth], numrevs))
2242 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2240 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2243 ui.write(('revision size : ') + fmt2 % totalsize)
2241 ui.write(('revision size : ') + fmt2 % totalsize)
2244 ui.write((' snapshot : ')
2242 ui.write((' snapshot : ')
2245 + fmt % pcfmt(fulltotal + semitotal, totalsize))
2243 + fmt % pcfmt(fulltotal + semitotal, totalsize))
2246 for depth in sorted(numsnapdepth):
2244 for depth in sorted(numsnapdepth):
2247 ui.write((' lvl-%-3d : ' % depth)
2245 ui.write((' lvl-%-3d : ' % depth)
2248 + fmt % pcfmt(snaptotal[depth], totalsize))
2246 + fmt % pcfmt(snaptotal[depth], totalsize))
2249 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2247 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2250
2248
2251 def fmtchunktype(chunktype):
2249 def fmtchunktype(chunktype):
2252 if chunktype == 'empty':
2250 if chunktype == 'empty':
2253 return ' %s : ' % chunktype
2251 return ' %s : ' % chunktype
2254 elif chunktype in pycompat.bytestr(string.ascii_letters):
2252 elif chunktype in pycompat.bytestr(string.ascii_letters):
2255 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2253 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2256 else:
2254 else:
2257 return ' 0x%s : ' % hex(chunktype)
2255 return ' 0x%s : ' % hex(chunktype)
2258
2256
2259 ui.write('\n')
2257 ui.write('\n')
2260 ui.write(('chunks : ') + fmt2 % numrevs)
2258 ui.write(('chunks : ') + fmt2 % numrevs)
2261 for chunktype in sorted(chunktypecounts):
2259 for chunktype in sorted(chunktypecounts):
2262 ui.write(fmtchunktype(chunktype))
2260 ui.write(fmtchunktype(chunktype))
2263 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2261 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2264 ui.write(('chunks size : ') + fmt2 % totalsize)
2262 ui.write(('chunks size : ') + fmt2 % totalsize)
2265 for chunktype in sorted(chunktypecounts):
2263 for chunktype in sorted(chunktypecounts):
2266 ui.write(fmtchunktype(chunktype))
2264 ui.write(fmtchunktype(chunktype))
2267 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2265 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2268
2266
2269 ui.write('\n')
2267 ui.write('\n')
2270 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2268 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2271 ui.write(('avg chain length : ') + fmt % avgchainlen)
2269 ui.write(('avg chain length : ') + fmt % avgchainlen)
2272 ui.write(('max chain length : ') + fmt % maxchainlen)
2270 ui.write(('max chain length : ') + fmt % maxchainlen)
2273 ui.write(('max chain reach : ') + fmt % maxchainspan)
2271 ui.write(('max chain reach : ') + fmt % maxchainspan)
2274 ui.write(('compression ratio : ') + fmt % compratio)
2272 ui.write(('compression ratio : ') + fmt % compratio)
2275
2273
2276 if format > 0:
2274 if format > 0:
2277 ui.write('\n')
2275 ui.write('\n')
2278 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2276 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2279 % tuple(datasize))
2277 % tuple(datasize))
2280 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2278 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2281 % tuple(fullsize))
2279 % tuple(fullsize))
2282 ui.write(('inter-snapshot size (min/max/avg) : %d / %d / %d\n')
2280 ui.write(('inter-snapshot size (min/max/avg) : %d / %d / %d\n')
2283 % tuple(semisize))
2281 % tuple(semisize))
2284 for depth in sorted(snapsizedepth):
2282 for depth in sorted(snapsizedepth):
2285 if depth == 0:
2283 if depth == 0:
2286 continue
2284 continue
2287 ui.write((' level-%-3d (min/max/avg) : %d / %d / %d\n')
2285 ui.write((' level-%-3d (min/max/avg) : %d / %d / %d\n')
2288 % ((depth,) + tuple(snapsizedepth[depth])))
2286 % ((depth,) + tuple(snapsizedepth[depth])))
2289 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2287 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2290 % tuple(deltasize))
2288 % tuple(deltasize))
2291
2289
2292 if numdeltas > 0:
2290 if numdeltas > 0:
2293 ui.write('\n')
2291 ui.write('\n')
2294 fmt = pcfmtstr(numdeltas)
2292 fmt = pcfmtstr(numdeltas)
2295 fmt2 = pcfmtstr(numdeltas, 4)
2293 fmt2 = pcfmtstr(numdeltas, 4)
2296 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2294 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2297 if numprev > 0:
2295 if numprev > 0:
2298 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2296 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2299 numprev))
2297 numprev))
2300 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2298 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2301 numprev))
2299 numprev))
2302 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2300 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2303 numprev))
2301 numprev))
2304 if gdelta:
2302 if gdelta:
2305 ui.write(('deltas against p1 : ')
2303 ui.write(('deltas against p1 : ')
2306 + fmt % pcfmt(nump1, numdeltas))
2304 + fmt % pcfmt(nump1, numdeltas))
2307 ui.write(('deltas against p2 : ')
2305 ui.write(('deltas against p2 : ')
2308 + fmt % pcfmt(nump2, numdeltas))
2306 + fmt % pcfmt(nump2, numdeltas))
2309 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2307 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2310 numdeltas))
2308 numdeltas))
2311
2309
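# Illustrative standalone versions of two helpers used inside debugrevlog
# above: a (min, max, total) size accumulator and the percentage formatter.
# The module-level names are assumptions; in the command they exist only as
# nested helpers.
def addsize(size, acc):
    # acc is a mutable [min, max, total] triple; min starts as None
    if acc[0] is None or size < acc[0]:
        acc[0] = size
    if size > acc[1]:
        acc[1] = size
    acc[2] += size

def pcfmt(value, total):
    if total:
        return (value, 100 * float(value) / total)
    return (value, 100.0)

# Example:
# acc = [None, 0, 0]
# for s in (10, 3, 7): addsize(s, acc)   # acc becomes [3, 10, 20]
# pcfmt(5, 20) -> (5, 25.0)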
2312 @command('debugrevlogindex', cmdutil.debugrevlogopts +
2310 @command('debugrevlogindex', cmdutil.debugrevlogopts +
2313 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
2311 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
2314 _('[-f FORMAT] -c|-m|FILE'),
2312 _('[-f FORMAT] -c|-m|FILE'),
2315 optionalrepo=True)
2313 optionalrepo=True)
2316 def debugrevlogindex(ui, repo, file_=None, **opts):
2314 def debugrevlogindex(ui, repo, file_=None, **opts):
2317 """dump the contents of a revlog index"""
2315 """dump the contents of a revlog index"""
2318 opts = pycompat.byteskwargs(opts)
2316 opts = pycompat.byteskwargs(opts)
2319 r = cmdutil.openrevlog(repo, 'debugrevlogindex', file_, opts)
2317 r = cmdutil.openrevlog(repo, 'debugrevlogindex', file_, opts)
2320 format = opts.get('format', 0)
2318 format = opts.get('format', 0)
2321 if format not in (0, 1):
2319 if format not in (0, 1):
2322 raise error.Abort(_("unknown format %d") % format)
2320 raise error.Abort(_("unknown format %d") % format)
2323
2321
2324 if ui.debugflag:
2322 if ui.debugflag:
2325 shortfn = hex
2323 shortfn = hex
2326 else:
2324 else:
2327 shortfn = short
2325 shortfn = short
2328
2326
2329 # There might not be anything in r, so have a sane default
2327 # There might not be anything in r, so have a sane default
2330 idlen = 12
2328 idlen = 12
2331 for i in r:
2329 for i in r:
2332 idlen = len(shortfn(r.node(i)))
2330 idlen = len(shortfn(r.node(i)))
2333 break
2331 break
2334
2332
2335 if format == 0:
2333 if format == 0:
2336 if ui.verbose:
2334 if ui.verbose:
2337 ui.write((" rev offset length linkrev"
2335 ui.write((" rev offset length linkrev"
2338 " %s %s p2\n") % ("nodeid".ljust(idlen),
2336 " %s %s p2\n") % ("nodeid".ljust(idlen),
2339 "p1".ljust(idlen)))
2337 "p1".ljust(idlen)))
2340 else:
2338 else:
2341 ui.write((" rev linkrev %s %s p2\n") % (
2339 ui.write((" rev linkrev %s %s p2\n") % (
2342 "nodeid".ljust(idlen), "p1".ljust(idlen)))
2340 "nodeid".ljust(idlen), "p1".ljust(idlen)))
2343 elif format == 1:
2341 elif format == 1:
2344 if ui.verbose:
2342 if ui.verbose:
2345 ui.write((" rev flag offset length size link p1"
2343 ui.write((" rev flag offset length size link p1"
2346 " p2 %s\n") % "nodeid".rjust(idlen))
2344 " p2 %s\n") % "nodeid".rjust(idlen))
2347 else:
2345 else:
2348 ui.write((" rev flag size link p1 p2 %s\n") %
2346 ui.write((" rev flag size link p1 p2 %s\n") %
2349 "nodeid".rjust(idlen))
2347 "nodeid".rjust(idlen))
2350
2348
2351 for i in r:
2349 for i in r:
2352 node = r.node(i)
2350 node = r.node(i)
2353 if format == 0:
2351 if format == 0:
2354 try:
2352 try:
2355 pp = r.parents(node)
2353 pp = r.parents(node)
2356 except Exception:
2354 except Exception:
2357 pp = [nullid, nullid]
2355 pp = [nullid, nullid]
2358 if ui.verbose:
2356 if ui.verbose:
2359 ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
2357 ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
2360 i, r.start(i), r.length(i), r.linkrev(i),
2358 i, r.start(i), r.length(i), r.linkrev(i),
2361 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
2359 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
2362 else:
2360 else:
2363 ui.write("% 6d % 7d %s %s %s\n" % (
2361 ui.write("% 6d % 7d %s %s %s\n" % (
2364 i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
2362 i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
2365 shortfn(pp[1])))
2363 shortfn(pp[1])))
2366 elif format == 1:
2364 elif format == 1:
2367 pr = r.parentrevs(i)
2365 pr = r.parentrevs(i)
2368 if ui.verbose:
2366 if ui.verbose:
2369 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
2367 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
2370 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
2368 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
2371 r.linkrev(i), pr[0], pr[1], shortfn(node)))
2369 r.linkrev(i), pr[0], pr[1], shortfn(node)))
2372 else:
2370 else:
2373 ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
2371 ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
2374 i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
2372 i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
2375 shortfn(node)))
2373 shortfn(node)))
2376
2374
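# Small sketch of the header-width trick used by debugrevlogindex above:
# the nodeid column is sized from the first row (12 chars for short hashes,
# 40 when --debug prints full hashes), with a sane default for empty
# revlogs. `format_header` is an illustrative name.
def format_header(first_nodeid=None, default_len=12):
    idlen = len(first_nodeid) if first_nodeid is not None else default_len
    return "   rev linkrev %s %s p2" % ("nodeid".ljust(idlen),
                                        "p1".ljust(idlen))

# Example: format_header("1f0dee641bb7") pads 'nodeid' and 'p1' to 12 chars.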
2377 @command('debugrevspec',
2375 @command('debugrevspec',
2378 [('', 'optimize', None,
2376 [('', 'optimize', None,
2379 _('print parsed tree after optimizing (DEPRECATED)')),
2377 _('print parsed tree after optimizing (DEPRECATED)')),
2380 ('', 'show-revs', True, _('print list of result revisions (default)')),
2378 ('', 'show-revs', True, _('print list of result revisions (default)')),
2381 ('s', 'show-set', None, _('print internal representation of result set')),
2379 ('s', 'show-set', None, _('print internal representation of result set')),
2382 ('p', 'show-stage', [],
2380 ('p', 'show-stage', [],
2383 _('print parsed tree at the given stage'), _('NAME')),
2381 _('print parsed tree at the given stage'), _('NAME')),
2384 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2382 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2385 ('', 'verify-optimized', False, _('verify optimized result')),
2383 ('', 'verify-optimized', False, _('verify optimized result')),
2386 ],
2384 ],
2387 ('REVSPEC'))
2385 ('REVSPEC'))
2388 def debugrevspec(ui, repo, expr, **opts):
2386 def debugrevspec(ui, repo, expr, **opts):
2389 """parse and apply a revision specification
2387 """parse and apply a revision specification
2390
2388
2391 Use -p/--show-stage option to print the parsed tree at the given stages.
2389 Use -p/--show-stage option to print the parsed tree at the given stages.
2392 Use -p all to print tree at every stage.
2390 Use -p all to print tree at every stage.
2393
2391
2394 Use --no-show-revs option with -s or -p to print only the set
2392 Use --no-show-revs option with -s or -p to print only the set
2395 representation or the parsed tree respectively.
2393 representation or the parsed tree respectively.
2396
2394
2397 Use --verify-optimized to compare the optimized result with the unoptimized
2395 Use --verify-optimized to compare the optimized result with the unoptimized
2398 one. Returns 1 if the optimized result differs.
2396 one. Returns 1 if the optimized result differs.
2399 """
2397 """
2400 opts = pycompat.byteskwargs(opts)
2398 opts = pycompat.byteskwargs(opts)
2401 aliases = ui.configitems('revsetalias')
2399 aliases = ui.configitems('revsetalias')
2402 stages = [
2400 stages = [
2403 ('parsed', lambda tree: tree),
2401 ('parsed', lambda tree: tree),
2404 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2402 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2405 ui.warn)),
2403 ui.warn)),
2406 ('concatenated', revsetlang.foldconcat),
2404 ('concatenated', revsetlang.foldconcat),
2407 ('analyzed', revsetlang.analyze),
2405 ('analyzed', revsetlang.analyze),
2408 ('optimized', revsetlang.optimize),
2406 ('optimized', revsetlang.optimize),
2409 ]
2407 ]
2410 if opts['no_optimized']:
2408 if opts['no_optimized']:
2411 stages = stages[:-1]
2409 stages = stages[:-1]
2412 if opts['verify_optimized'] and opts['no_optimized']:
2410 if opts['verify_optimized'] and opts['no_optimized']:
2413 raise error.Abort(_('cannot use --verify-optimized with '
2411 raise error.Abort(_('cannot use --verify-optimized with '
2414 '--no-optimized'))
2412 '--no-optimized'))
2415 stagenames = set(n for n, f in stages)
2413 stagenames = set(n for n, f in stages)
2416
2414
2417 showalways = set()
2415 showalways = set()
2418 showchanged = set()
2416 showchanged = set()
2419 if ui.verbose and not opts['show_stage']:
2417 if ui.verbose and not opts['show_stage']:
2420 # show parsed tree by --verbose (deprecated)
2418 # show parsed tree by --verbose (deprecated)
2421 showalways.add('parsed')
2419 showalways.add('parsed')
2422 showchanged.update(['expanded', 'concatenated'])
2420 showchanged.update(['expanded', 'concatenated'])
2423 if opts['optimize']:
2421 if opts['optimize']:
2424 showalways.add('optimized')
2422 showalways.add('optimized')
2425 if opts['show_stage'] and opts['optimize']:
2423 if opts['show_stage'] and opts['optimize']:
2426 raise error.Abort(_('cannot use --optimize with --show-stage'))
2424 raise error.Abort(_('cannot use --optimize with --show-stage'))
2427 if opts['show_stage'] == ['all']:
2425 if opts['show_stage'] == ['all']:
2428 showalways.update(stagenames)
2426 showalways.update(stagenames)
2429 else:
2427 else:
2430 for n in opts['show_stage']:
2428 for n in opts['show_stage']:
2431 if n not in stagenames:
2429 if n not in stagenames:
2432 raise error.Abort(_('invalid stage name: %s') % n)
2430 raise error.Abort(_('invalid stage name: %s') % n)
2433 showalways.update(opts['show_stage'])
2431 showalways.update(opts['show_stage'])
2434
2432
2435 treebystage = {}
2433 treebystage = {}
2436 printedtree = None
2434 printedtree = None
2437 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
2435 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
2438 for n, f in stages:
2436 for n, f in stages:
2439 treebystage[n] = tree = f(tree)
2437 treebystage[n] = tree = f(tree)
2440 if n in showalways or (n in showchanged and tree != printedtree):
2438 if n in showalways or (n in showchanged and tree != printedtree):
2441 if opts['show_stage'] or n != 'parsed':
2439 if opts['show_stage'] or n != 'parsed':
2442 ui.write(("* %s:\n") % n)
2440 ui.write(("* %s:\n") % n)
2443 ui.write(revsetlang.prettyformat(tree), "\n")
2441 ui.write(revsetlang.prettyformat(tree), "\n")
2444 printedtree = tree
2442 printedtree = tree
2445
2443
2446 if opts['verify_optimized']:
2444 if opts['verify_optimized']:
2447 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2445 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2448 brevs = revset.makematcher(treebystage['optimized'])(repo)
2446 brevs = revset.makematcher(treebystage['optimized'])(repo)
2449 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2447 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2450 ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
2448 ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
2451 ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
2449 ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
2452 arevs = list(arevs)
2450 arevs = list(arevs)
2453 brevs = list(brevs)
2451 brevs = list(brevs)
2454 if arevs == brevs:
2452 if arevs == brevs:
2455 return 0
2453 return 0
2456 ui.write(('--- analyzed\n'), label='diff.file_a')
2454 ui.write(('--- analyzed\n'), label='diff.file_a')
2457 ui.write(('+++ optimized\n'), label='diff.file_b')
2455 ui.write(('+++ optimized\n'), label='diff.file_b')
2458 sm = difflib.SequenceMatcher(None, arevs, brevs)
2456 sm = difflib.SequenceMatcher(None, arevs, brevs)
2459 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2457 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2460 if tag in ('delete', 'replace'):
2458 if tag in ('delete', 'replace'):
2461 for c in arevs[alo:ahi]:
2459 for c in arevs[alo:ahi]:
2462 ui.write('-%s\n' % c, label='diff.deleted')
2460 ui.write('-%s\n' % c, label='diff.deleted')
2463 if tag in ('insert', 'replace'):
2461 if tag in ('insert', 'replace'):
2464 for c in brevs[blo:bhi]:
2462 for c in brevs[blo:bhi]:
2465 ui.write('+%s\n' % c, label='diff.inserted')
2463 ui.write('+%s\n' % c, label='diff.inserted')
2466 if tag == 'equal':
2464 if tag == 'equal':
2467 for c in arevs[alo:ahi]:
2465 for c in arevs[alo:ahi]:
2468 ui.write(' %s\n' % c)
2466 ui.write(' %s\n' % c)
2469 return 1
2467 return 1
2470
2468
2471 func = revset.makematcher(tree)
2469 func = revset.makematcher(tree)
2472 revs = func(repo)
2470 revs = func(repo)
2473 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2471 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2474 ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
2472 ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
2475 if not opts['show_revs']:
2473 if not opts['show_revs']:
2476 return
2474 return
2477 for c in revs:
2475 for c in revs:
2478 ui.write("%d\n" % c)
2476 ui.write("%d\n" % c)
2479
2477
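# Hedged sketch of the stage machinery in debugrevspec above: run a value
# through named transformation stages, remember every intermediate result,
# and report only the stages whose output actually changed (or that were
# explicitly requested). `run_stages` is an illustrative name, not
# revsetlang API.
def run_stages(value, stages, show=None):
    """stages: ordered list of (name, callable); show: optional set of
    stage names to always report. Returns (final, bystage, reports)."""
    bystage = {}
    reports = []
    previous = None
    for name, func in stages:
        value = func(value)
        bystage[name] = value
        if (show and name in show) or value != previous:
            reports.append((name, value))
            previous = value
    return value, bystage, reports

# Example:
# run_stages('  A ', [('stripped', str.strip), ('lower', str.lower)])
# -> ('a', {'stripped': 'A', 'lower': 'a'},
#     [('stripped', 'A'), ('lower', 'a')])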
2480 @command('debugserve', [
2478 @command('debugserve', [
2481 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2479 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2482 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2480 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2483 ('', 'logiofile', '', _('file to log server I/O to')),
2481 ('', 'logiofile', '', _('file to log server I/O to')),
2484 ], '')
2482 ], '')
2485 def debugserve(ui, repo, **opts):
2483 def debugserve(ui, repo, **opts):
2486 """run a server with advanced settings
2484 """run a server with advanced settings
2487
2485
2488 This command is similar to :hg:`serve`. It exists partially as a
2486 This command is similar to :hg:`serve`. It exists partially as a
2489 workaround to the fact that ``hg serve --stdio`` must have specific
2487 workaround to the fact that ``hg serve --stdio`` must have specific
2490 arguments for security reasons.
2488 arguments for security reasons.
2491 """
2489 """
2492 opts = pycompat.byteskwargs(opts)
2490 opts = pycompat.byteskwargs(opts)
2493
2491
2494 if not opts['sshstdio']:
2492 if not opts['sshstdio']:
2495 raise error.Abort(_('only --sshstdio is currently supported'))
2493 raise error.Abort(_('only --sshstdio is currently supported'))
2496
2494
2497 logfh = None
2495 logfh = None
2498
2496
2499 if opts['logiofd'] and opts['logiofile']:
2497 if opts['logiofd'] and opts['logiofile']:
2500 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2498 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2501
2499
2502 if opts['logiofd']:
2500 if opts['logiofd']:
2503 # Line buffered because output is line based.
2501 # Line buffered because output is line based.
2504 try:
2502 try:
2505 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2503 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2506 except OSError as e:
2504 except OSError as e:
2507 if e.errno != errno.ESPIPE:
2505 if e.errno != errno.ESPIPE:
2508 raise
2506 raise
2509 # can't seek a pipe, so `ab` mode fails on py3
2507 # can't seek a pipe, so `ab` mode fails on py3
2510 logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
2508 logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
2511 elif opts['logiofile']:
2509 elif opts['logiofile']:
2512 logfh = open(opts['logiofile'], 'ab', 1)
2510 logfh = open(opts['logiofile'], 'ab', 1)
2513
2511
2514 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2512 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2515 s.serve_forever()
2513 s.serve_forever()
2516
2514
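# Sketch of the --logiofd handling above (assumption: the same fallback is
# useful outside this command): pipes cannot seek, so opening them in
# append mode fails with ESPIPE on Python 3 and the code retries in plain
# write mode, line buffered either way. `open_log_fd` is an illustrative
# name; errno and os are the same modules imported at the top of this file.
import errno
import os

def open_log_fd(fd):
    try:
        return os.fdopen(fd, 'ab', 1)
    except OSError as e:
        if e.errno != errno.ESPIPE:
            raise
        return os.fdopen(fd, 'wb', 1)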
2517 @command('debugsetparents', [], _('REV1 [REV2]'))
2515 @command('debugsetparents', [], _('REV1 [REV2]'))
2518 def debugsetparents(ui, repo, rev1, rev2=None):
2516 def debugsetparents(ui, repo, rev1, rev2=None):
2519 """manually set the parents of the current working directory
2517 """manually set the parents of the current working directory
2520
2518
2521 This is useful for writing repository conversion tools, but should
2519 This is useful for writing repository conversion tools, but should
2522 be used with care. For example, neither the working directory nor the
2520 be used with care. For example, neither the working directory nor the
2523 dirstate is updated, so file status may be incorrect after running this
2521 dirstate is updated, so file status may be incorrect after running this
2524 command.
2522 command.
2525
2523
2526 Returns 0 on success.
2524 Returns 0 on success.
2527 """
2525 """
2528
2526
2529 node1 = scmutil.revsingle(repo, rev1).node()
2527 node1 = scmutil.revsingle(repo, rev1).node()
2530 node2 = scmutil.revsingle(repo, rev2, 'null').node()
2528 node2 = scmutil.revsingle(repo, rev2, 'null').node()
2531
2529
2532 with repo.wlock():
2530 with repo.wlock():
2533 repo.setparents(node1, node2)
2531 repo.setparents(node1, node2)
2534
2532
2535 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2533 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2536 def debugssl(ui, repo, source=None, **opts):
2534 def debugssl(ui, repo, source=None, **opts):
2537 '''test a secure connection to a server
2535 '''test a secure connection to a server
2538
2536
2539 This builds the certificate chain for the server on Windows, installing the
2537 This builds the certificate chain for the server on Windows, installing the
2540 missing intermediates and trusted root via Windows Update if necessary. It
2538 missing intermediates and trusted root via Windows Update if necessary. It
2541 does nothing on other platforms.
2539 does nothing on other platforms.
2542
2540
2543 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2541 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2544 that server is used. See :hg:`help urls` for more information.
2542 that server is used. See :hg:`help urls` for more information.
2545
2543
2546 If the update succeeds, retry the original operation. Otherwise, the cause
2544 If the update succeeds, retry the original operation. Otherwise, the cause
2547 of the SSL error is likely another issue.
2545 of the SSL error is likely another issue.
2548 '''
2546 '''
2549 if not pycompat.iswindows:
2547 if not pycompat.iswindows:
2550 raise error.Abort(_('certificate chain building is only possible on '
2548 raise error.Abort(_('certificate chain building is only possible on '
2551 'Windows'))
2549 'Windows'))
2552
2550
2553 if not source:
2551 if not source:
2554 if not repo:
2552 if not repo:
2555 raise error.Abort(_("there is no Mercurial repository here, and no "
2553 raise error.Abort(_("there is no Mercurial repository here, and no "
2556 "server specified"))
2554 "server specified"))
2557 source = "default"
2555 source = "default"
2558
2556
2559 source, branches = hg.parseurl(ui.expandpath(source))
2557 source, branches = hg.parseurl(ui.expandpath(source))
2560 url = util.url(source)
2558 url = util.url(source)
2561 addr = None
2559 addr = None
2562
2560
2563 defaultport = {'https': 443, 'ssh': 22}
2561 defaultport = {'https': 443, 'ssh': 22}
2564 if url.scheme in defaultport:
2562 if url.scheme in defaultport:
2565 try:
2563 try:
2566 addr = (url.host, int(url.port or defaultport[url.scheme]))
2564 addr = (url.host, int(url.port or defaultport[url.scheme]))
2567 except ValueError:
2565 except ValueError:
2568 raise error.Abort(_("malformed port number in URL"))
2566 raise error.Abort(_("malformed port number in URL"))
2569 else:
2567 else:
2570 raise error.Abort(_("only https and ssh connections are supported"))
2568 raise error.Abort(_("only https and ssh connections are supported"))
2571
2569
2572 from . import win32
2570 from . import win32
2573
2571
2574 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2572 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2575 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2573 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2576
2574
2577 try:
2575 try:
2578 s.connect(addr)
2576 s.connect(addr)
2579 cert = s.getpeercert(True)
2577 cert = s.getpeercert(True)
2580
2578
2581 ui.status(_('checking the certificate chain for %s\n') % url.host)
2579 ui.status(_('checking the certificate chain for %s\n') % url.host)
2582
2580
2583 complete = win32.checkcertificatechain(cert, build=False)
2581 complete = win32.checkcertificatechain(cert, build=False)
2584
2582
2585 if not complete:
2583 if not complete:
2586 ui.status(_('certificate chain is incomplete, updating... '))
2584 ui.status(_('certificate chain is incomplete, updating... '))
2587
2585
2588 if not win32.checkcertificatechain(cert):
2586 if not win32.checkcertificatechain(cert):
2589 ui.status(_('failed.\n'))
2587 ui.status(_('failed.\n'))
2590 else:
2588 else:
2591 ui.status(_('done.\n'))
2589 ui.status(_('done.\n'))
2592 else:
2590 else:
2593 ui.status(_('full certificate chain is available\n'))
2591 ui.status(_('full certificate chain is available\n'))
2594 finally:
2592 finally:
2595 s.close()
2593 s.close()
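# Illustrative usage (hypothetical URL): check whether Windows can build the
# full certificate chain for a hosted repository.
#
#   $ hg debugssl https://hg.example.com/repo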
2596
2594
2597 @command('debugsub',
2595 @command('debugsub',
2598 [('r', 'rev', '',
2596 [('r', 'rev', '',
2599 _('revision to check'), _('REV'))],
2597 _('revision to check'), _('REV'))],
2600 _('[-r REV] [REV]'))
2598 _('[-r REV] [REV]'))
2601 def debugsub(ui, repo, rev=None):
2599 def debugsub(ui, repo, rev=None):
2602 ctx = scmutil.revsingle(repo, rev, None)
2600 ctx = scmutil.revsingle(repo, rev, None)
2603 for k, v in sorted(ctx.substate.items()):
2601 for k, v in sorted(ctx.substate.items()):
2604 ui.write(('path %s\n') % k)
2602 ui.write(('path %s\n') % k)
2605 ui.write((' source %s\n') % v[0])
2603 ui.write((' source %s\n') % v[0])
2606 ui.write((' revision %s\n') % v[1])
2604 ui.write((' revision %s\n') % v[1])
2607
2605
2608 @command('debugsuccessorssets',
2606 @command('debugsuccessorssets',
2609 [('', 'closest', False, _('return closest successors sets only'))],
2607 [('', 'closest', False, _('return closest successors sets only'))],
2610 _('[REV]'))
2608 _('[REV]'))
2611 def debugsuccessorssets(ui, repo, *revs, **opts):
2609 def debugsuccessorssets(ui, repo, *revs, **opts):
2612 """show set of successors for revision
2610 """show set of successors for revision
2613
2611
2614 A successors set of changeset A is a consistent group of revisions that
2612 A successors set of changeset A is a consistent group of revisions that
2615 succeed A. It contains non-obsolete changesets only, unless the --closest
2613 succeed A. It contains non-obsolete changesets only, unless the --closest
2616 option is set.
2614 option is set.
2617
2615
2618 In most cases a changeset A has a single successors set containing a single
2616 In most cases a changeset A has a single successors set containing a single
2619 successor (changeset A replaced by A').
2617 successor (changeset A replaced by A').
2620
2618
2621 A changeset that is made obsolete with no successors is called "pruned".
2619 A changeset that is made obsolete with no successors is called "pruned".
2622 Such changesets have no successors sets at all.
2620 Such changesets have no successors sets at all.
2623
2621
2624 A changeset that has been "split" will have a successors set containing
2622 A changeset that has been "split" will have a successors set containing
2625 more than one successor.
2623 more than one successor.
2626
2624
2627 A changeset that has been rewritten in multiple different ways is called
2625 A changeset that has been rewritten in multiple different ways is called
2628 "divergent". Such changesets have multiple successor sets (each of which
2626 "divergent". Such changesets have multiple successor sets (each of which
2629 may also be split, i.e. have multiple successors).
2627 may also be split, i.e. have multiple successors).
2630
2628
2631 Results are displayed as follows::
2629 Results are displayed as follows::
2632
2630
2633 <rev1>
2631 <rev1>
2634 <successors-1A>
2632 <successors-1A>
2635 <rev2>
2633 <rev2>
2636 <successors-2A>
2634 <successors-2A>
2637 <successors-2B1> <successors-2B2> <successors-2B3>
2635 <successors-2B1> <successors-2B2> <successors-2B3>
2638
2636
2639 Here rev2 has two possible (i.e. divergent) successors sets. The first
2637 Here rev2 has two possible (i.e. divergent) successors sets. The first
2640 holds one element, whereas the second holds three (i.e. the changeset has
2638 holds one element, whereas the second holds three (i.e. the changeset has
2641 been split).
2639 been split).
2642 """
2640 """
2643 # passed to successorssets caching computation from one call to another
2641 # passed to successorssets caching computation from one call to another
2644 cache = {}
2642 cache = {}
2645 ctx2str = bytes
2643 ctx2str = bytes
2646 node2str = short
2644 node2str = short
2647 for rev in scmutil.revrange(repo, revs):
2645 for rev in scmutil.revrange(repo, revs):
2648 ctx = repo[rev]
2646 ctx = repo[rev]
2649 ui.write('%s\n'% ctx2str(ctx))
2647 ui.write('%s\n'% ctx2str(ctx))
2650 for succsset in obsutil.successorssets(repo, ctx.node(),
2648 for succsset in obsutil.successorssets(repo, ctx.node(),
2651 closest=opts[r'closest'],
2649 closest=opts[r'closest'],
2652 cache=cache):
2650 cache=cache):
2653 if succsset:
2651 if succsset:
2654 ui.write(' ')
2652 ui.write(' ')
2655 ui.write(node2str(succsset[0]))
2653 ui.write(node2str(succsset[0]))
2656 for node in succsset[1:]:
2654 for node in succsset[1:]:
2657 ui.write(' ')
2655 ui.write(' ')
2658 ui.write(node2str(node))
2656 ui.write(node2str(node))
2659 ui.write('\n')
2657 ui.write('\n')
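# Illustrative usage: print successors sets for every obsolete changeset,
# limited to the closest successors (``obsolete()`` is the stock revset
# matching obsolete changesets).
#
#   $ hg debugsuccessorssets --closest 'obsolete()'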
2660
2658
2661 @command('debugtemplate',
2659 @command('debugtemplate',
2662 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2660 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2663 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2661 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2664 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2662 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2665 optionalrepo=True)
2663 optionalrepo=True)
2666 def debugtemplate(ui, repo, tmpl, **opts):
2664 def debugtemplate(ui, repo, tmpl, **opts):
2667 """parse and apply a template
2665 """parse and apply a template
2668
2666
2669 If -r/--rev is given, the template is processed as a log template and
2667 If -r/--rev is given, the template is processed as a log template and
2670 applied to the given changesets. Otherwise, it is processed as a generic
2668 applied to the given changesets. Otherwise, it is processed as a generic
2671 template.
2669 template.
2672
2670
2673 Use --verbose to print the parsed tree.
2671 Use --verbose to print the parsed tree.
2674 """
2672 """
2675 revs = None
2673 revs = None
2676 if opts[r'rev']:
2674 if opts[r'rev']:
2677 if repo is None:
2675 if repo is None:
2678 raise error.RepoError(_('there is no Mercurial repository here '
2676 raise error.RepoError(_('there is no Mercurial repository here '
2679 '(.hg not found)'))
2677 '(.hg not found)'))
2680 revs = scmutil.revrange(repo, opts[r'rev'])
2678 revs = scmutil.revrange(repo, opts[r'rev'])
2681
2679
2682 props = {}
2680 props = {}
2683 for d in opts[r'define']:
2681 for d in opts[r'define']:
2684 try:
2682 try:
2685 k, v = (e.strip() for e in d.split('=', 1))
2683 k, v = (e.strip() for e in d.split('=', 1))
2686 if not k or k == 'ui':
2684 if not k or k == 'ui':
2687 raise ValueError
2685 raise ValueError
2688 props[k] = v
2686 props[k] = v
2689 except ValueError:
2687 except ValueError:
2690 raise error.Abort(_('malformed keyword definition: %s') % d)
2688 raise error.Abort(_('malformed keyword definition: %s') % d)
2691
2689
2692 if ui.verbose:
2690 if ui.verbose:
2693 aliases = ui.configitems('templatealias')
2691 aliases = ui.configitems('templatealias')
2694 tree = templater.parse(tmpl)
2692 tree = templater.parse(tmpl)
2695 ui.note(templater.prettyformat(tree), '\n')
2693 ui.note(templater.prettyformat(tree), '\n')
2696 newtree = templater.expandaliases(tree, aliases)
2694 newtree = templater.expandaliases(tree, aliases)
2697 if newtree != tree:
2695 if newtree != tree:
2698 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2696 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2699
2697
2700 if revs is None:
2698 if revs is None:
2701 tres = formatter.templateresources(ui, repo)
2699 tres = formatter.templateresources(ui, repo)
2702 t = formatter.maketemplater(ui, tmpl, resources=tres)
2700 t = formatter.maketemplater(ui, tmpl, resources=tres)
2703 if ui.verbose:
2701 if ui.verbose:
2704 kwds, funcs = t.symbolsuseddefault()
2702 kwds, funcs = t.symbolsuseddefault()
2705 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2703 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2706 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2704 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2707 ui.write(t.renderdefault(props))
2705 ui.write(t.renderdefault(props))
2708 else:
2706 else:
2709 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2707 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2710 if ui.verbose:
2708 if ui.verbose:
2711 kwds, funcs = displayer.t.symbolsuseddefault()
2709 kwds, funcs = displayer.t.symbolsuseddefault()
2712 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2710 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2713 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2711 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2714 for r in revs:
2712 for r in revs:
2715 displayer.show(repo[r], **pycompat.strkwargs(props))
2713 displayer.show(repo[r], **pycompat.strkwargs(props))
2716 displayer.close()
2714 displayer.close()
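# Illustrative usage: render a generic template with a user-defined keyword
# (``greeting`` is a made-up key passed via -D), then a log template applied
# to the working directory parent.
#
#   $ hg debugtemplate -D greeting=hello '{greeting}, world\n'
#   $ hg debugtemplate -r . '{rev}:{node|short} {desc|firstline}\n'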
2717
2715
2718 @command('debuguigetpass', [
2716 @command('debuguigetpass', [
2719 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2717 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2720 ], _('[-p TEXT]'), norepo=True)
2718 ], _('[-p TEXT]'), norepo=True)
2721 def debuguigetpass(ui, prompt=''):
2719 def debuguigetpass(ui, prompt=''):
2722 """show prompt to type password"""
2720 """show prompt to type password"""
2723 r = ui.getpass(prompt)
2721 r = ui.getpass(prompt)
2724 ui.write(('response: %s\n') % r)
2722 ui.write(('response: %s\n') % r)
2725
2723
2726 @command('debuguiprompt', [
2724 @command('debuguiprompt', [
2727 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2725 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2728 ], _('[-p TEXT]'), norepo=True)
2726 ], _('[-p TEXT]'), norepo=True)
2729 def debuguiprompt(ui, prompt=''):
2727 def debuguiprompt(ui, prompt=''):
2730 """show plain prompt"""
2728 """show plain prompt"""
2731 r = ui.prompt(prompt)
2729 r = ui.prompt(prompt)
2732 ui.write(('response: %s\n') % r)
2730 ui.write(('response: %s\n') % r)
2733
2731
2734 @command('debugupdatecaches', [])
2732 @command('debugupdatecaches', [])
2735 def debugupdatecaches(ui, repo, *pats, **opts):
2733 def debugupdatecaches(ui, repo, *pats, **opts):
2736 """warm all known caches in the repository"""
2734 """warm all known caches in the repository"""
2737 with repo.wlock(), repo.lock():
2735 with repo.wlock(), repo.lock():
2738 repo.updatecaches(full=True)
2736 repo.updatecaches(full=True)
2739
2737
2740 @command('debugupgraderepo', [
2738 @command('debugupgraderepo', [
2741 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2739 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2742 ('', 'run', False, _('performs an upgrade')),
2740 ('', 'run', False, _('performs an upgrade')),
2743 ])
2741 ])
2744 def debugupgraderepo(ui, repo, run=False, optimize=None):
2742 def debugupgraderepo(ui, repo, run=False, optimize=None):
2745 """upgrade a repository to use different features
2743 """upgrade a repository to use different features
2746
2744
2747 If no arguments are specified, the repository is evaluated for upgrade
2745 If no arguments are specified, the repository is evaluated for upgrade
2748 and a list of problems and potential optimizations is printed.
2746 and a list of problems and potential optimizations is printed.
2749
2747
2750 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2748 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2751 can be influenced via additional arguments. More details will be provided
2749 can be influenced via additional arguments. More details will be provided
2752 by the command output when run without ``--run``.
2750 by the command output when run without ``--run``.
2753
2751
2754 During the upgrade, the repository will be locked and no writes will be
2752 During the upgrade, the repository will be locked and no writes will be
2755 allowed.
2753 allowed.
2756
2754
2757 At the end of the upgrade, the repository may not be readable while new
2755 At the end of the upgrade, the repository may not be readable while new
2758 repository data is swapped in. This window will be as long as it takes to
2756 repository data is swapped in. This window will be as long as it takes to
2759 rename some directories inside the ``.hg`` directory. On most machines, this
2757 rename some directories inside the ``.hg`` directory. On most machines, this
2760 should complete almost instantaneously and the chances of a consumer being
2758 should complete almost instantaneously and the chances of a consumer being
2761 unable to access the repository should be low.
2759 unable to access the repository should be low.
2762 """
2760 """
2763 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2761 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2764
2762
2765 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2763 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2766 inferrepo=True)
2764 inferrepo=True)
2767 def debugwalk(ui, repo, *pats, **opts):
2765 def debugwalk(ui, repo, *pats, **opts):
2768 """show how files match on given patterns"""
2766 """show how files match on given patterns"""
2769 opts = pycompat.byteskwargs(opts)
2767 opts = pycompat.byteskwargs(opts)
2770 m = scmutil.match(repo[None], pats, opts)
2768 m = scmutil.match(repo[None], pats, opts)
2771 if ui.verbose:
2769 if ui.verbose:
2772 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
2770 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
2773 items = list(repo[None].walk(m))
2771 items = list(repo[None].walk(m))
2774 if not items:
2772 if not items:
2775 return
2773 return
2776 f = lambda fn: fn
2774 f = lambda fn: fn
2777 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2775 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2778 f = lambda fn: util.normpath(fn)
2776 f = lambda fn: util.normpath(fn)
2779 fmt = 'f %%-%ds %%-%ds %%s' % (
2777 fmt = 'f %%-%ds %%-%ds %%s' % (
2780 max([len(abs) for abs in items]),
2778 max([len(abs) for abs in items]),
2781 max([len(m.rel(abs)) for abs in items]))
2779 max([len(m.rel(abs)) for abs in items]))
2782 for abs in items:
2780 for abs in items:
2783 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2781 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2784 ui.write("%s\n" % line.rstrip())
2782 ui.write("%s\n" % line.rstrip())
2785
2783
2786 @command('debugwhyunstable', [], _('REV'))
2784 @command('debugwhyunstable', [], _('REV'))
2787 def debugwhyunstable(ui, repo, rev):
2785 def debugwhyunstable(ui, repo, rev):
2788 """explain instabilities of a changeset"""
2786 """explain instabilities of a changeset"""
2789 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
2787 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
2790 dnodes = ''
2788 dnodes = ''
2791 if entry.get('divergentnodes'):
2789 if entry.get('divergentnodes'):
2792 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2790 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2793 for ctx in entry['divergentnodes']) + ' '
2791 for ctx in entry['divergentnodes']) + ' '
2794 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2792 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2795 entry['reason'], entry['node']))
2793 entry['reason'], entry['node']))
2796
2794
2797 @command('debugwireargs',
2795 @command('debugwireargs',
2798 [('', 'three', '', 'three'),
2796 [('', 'three', '', 'three'),
2799 ('', 'four', '', 'four'),
2797 ('', 'four', '', 'four'),
2800 ('', 'five', '', 'five'),
2798 ('', 'five', '', 'five'),
2801 ] + cmdutil.remoteopts,
2799 ] + cmdutil.remoteopts,
2802 _('REPO [OPTIONS]... [ONE [TWO]]'),
2800 _('REPO [OPTIONS]... [ONE [TWO]]'),
2803 norepo=True)
2801 norepo=True)
2804 def debugwireargs(ui, repopath, *vals, **opts):
2802 def debugwireargs(ui, repopath, *vals, **opts):
2805 opts = pycompat.byteskwargs(opts)
2803 opts = pycompat.byteskwargs(opts)
2806 repo = hg.peer(ui, opts, repopath)
2804 repo = hg.peer(ui, opts, repopath)
2807 for opt in cmdutil.remoteopts:
2805 for opt in cmdutil.remoteopts:
2808 del opts[opt[1]]
2806 del opts[opt[1]]
2809 args = {}
2807 args = {}
2810 for k, v in opts.iteritems():
2808 for k, v in opts.iteritems():
2811 if v:
2809 if v:
2812 args[k] = v
2810 args[k] = v
2813 args = pycompat.strkwargs(args)
2811 args = pycompat.strkwargs(args)
2814 # run twice to check that we don't mess up the stream for the next command
2812 # run twice to check that we don't mess up the stream for the next command
2815 res1 = repo.debugwireargs(*vals, **args)
2813 res1 = repo.debugwireargs(*vals, **args)
2816 res2 = repo.debugwireargs(*vals, **args)
2814 res2 = repo.debugwireargs(*vals, **args)
2817 ui.write("%s\n" % res1)
2815 ui.write("%s\n" % res1)
2818 if res1 != res2:
2816 if res1 != res2:
2819 ui.warn("%s\n" % res2)
2817 ui.warn("%s\n" % res2)
2820
2818
2821 def _parsewirelangblocks(fh):
2819 def _parsewirelangblocks(fh):
2822 activeaction = None
2820 activeaction = None
2823 blocklines = []
2821 blocklines = []
2824
2822
2825 for line in fh:
2823 for line in fh:
2826 line = line.rstrip()
2824 line = line.rstrip()
2827 if not line:
2825 if not line:
2828 continue
2826 continue
2829
2827
2830 if line.startswith(b'#'):
2828 if line.startswith(b'#'):
2831 continue
2829 continue
2832
2830
2833 if not line.startswith(b' '):
2831 if not line.startswith(b' '):
2834 # New block. Flush previous one.
2832 # New block. Flush previous one.
2835 if activeaction:
2833 if activeaction:
2836 yield activeaction, blocklines
2834 yield activeaction, blocklines
2837
2835
2838 activeaction = line
2836 activeaction = line
2839 blocklines = []
2837 blocklines = []
2840 continue
2838 continue
2841
2839
2842 # Else we start with an indent.
2840 # Else we start with an indent.
2843
2841
2844 if not activeaction:
2842 if not activeaction:
2845 raise error.Abort(_('indented line outside of block'))
2843 raise error.Abort(_('indented line outside of block'))
2846
2844
2847 blocklines.append(line)
2845 blocklines.append(line)
2848
2846
2849 # Flush last block.
2847 # Flush last block.
2850 if activeaction:
2848 if activeaction:
2851 yield activeaction, blocklines
2849 yield activeaction, blocklines
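# For example (illustrative), feeding these three lines through the parser:
#
#   command listkeys
#       namespace bookmarks
#   close
#
# yields (b'command listkeys', [b'    namespace bookmarks']) and then
# (b'close', []); leading indentation is preserved here and stripped later by
# the consumer of each block.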
2852
2850
2853 @command('debugwireproto',
2851 @command('debugwireproto',
2854 [
2852 [
2855 ('', 'localssh', False, _('start an SSH server for this repo')),
2853 ('', 'localssh', False, _('start an SSH server for this repo')),
2856 ('', 'peer', '', _('construct a specific version of the peer')),
2854 ('', 'peer', '', _('construct a specific version of the peer')),
2857 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2855 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2858 ('', 'nologhandshake', False,
2856 ('', 'nologhandshake', False,
2859 _('do not log I/O related to the peer handshake')),
2857 _('do not log I/O related to the peer handshake')),
2860 ] + cmdutil.remoteopts,
2858 ] + cmdutil.remoteopts,
2861 _('[PATH]'),
2859 _('[PATH]'),
2862 optionalrepo=True)
2860 optionalrepo=True)
2863 def debugwireproto(ui, repo, path=None, **opts):
2861 def debugwireproto(ui, repo, path=None, **opts):
2864 """send wire protocol commands to a server
2862 """send wire protocol commands to a server
2865
2863
2866 This command can be used to issue wire protocol commands to remote
2864 This command can be used to issue wire protocol commands to remote
2867 peers and to debug the raw data being exchanged.
2865 peers and to debug the raw data being exchanged.
2868
2866
2869 ``--localssh`` will start an SSH server against the current repository
2867 ``--localssh`` will start an SSH server against the current repository
2870 and connect to that. By default, the connection will perform a handshake
2868 and connect to that. By default, the connection will perform a handshake
2871 and establish an appropriate peer instance.
2869 and establish an appropriate peer instance.
2872
2870
2873 ``--peer`` can be used to bypass the handshake protocol and construct a
2871 ``--peer`` can be used to bypass the handshake protocol and construct a
2874 peer instance using the specified class type. Valid values are ``raw``,
2872 peer instance using the specified class type. Valid values are ``raw``,
2875 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2873 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2876 raw data payloads and don't support higher-level command actions.
2874 raw data payloads and don't support higher-level command actions.
2877
2875
2878 ``--noreadstderr`` can be used to disable automatic reading from stderr
2876 ``--noreadstderr`` can be used to disable automatic reading from stderr
2879 of the peer (for SSH connections only). Disabling automatic reading of
2877 of the peer (for SSH connections only). Disabling automatic reading of
2880 stderr is useful for making output more deterministic.
2878 stderr is useful for making output more deterministic.
2881
2879
2882 Commands are issued via a mini language which is specified via stdin.
2880 Commands are issued via a mini language which is specified via stdin.
2883 The language consists of individual actions to perform. An action is
2881 The language consists of individual actions to perform. An action is
2884 defined by a block. A block is defined as a line with no leading
2882 defined by a block. A block is defined as a line with no leading
2885 space followed by 0 or more lines with leading space. Blocks are
2883 space followed by 0 or more lines with leading space. Blocks are
2886 effectively a high-level command with additional metadata.
2884 effectively a high-level command with additional metadata.
2887
2885
2888 Lines beginning with ``#`` are ignored.
2886 Lines beginning with ``#`` are ignored.
2889
2887
2890 The following sections denote available actions.
2888 The following sections denote available actions.
2891
2889
2892 raw
2890 raw
2893 ---
2891 ---
2894
2892
2895 Send raw data to the server.
2893 Send raw data to the server.
2896
2894
2897 The block payload contains the raw data to send as one atomic send
2895 The block payload contains the raw data to send as one atomic send
2898 operation. The data may not actually be delivered in a single system
2896 operation. The data may not actually be delivered in a single system
2899 call: it depends on the abilities of the transport being used.
2897 call: it depends on the abilities of the transport being used.
2900
2898
2901 Each line in the block is de-indented and concatenated. Then, that
2899 Each line in the block is de-indented and concatenated. Then, that
2902 value is evaluated as a Python b'' literal. This allows the use of
2900 value is evaluated as a Python b'' literal. This allows the use of
2903 backslash escaping, etc.
2901 backslash escaping, etc.
2904
2902
2905 raw+
2903 raw+
2906 ----
2904 ----
2907
2905
2908 Behaves like ``raw`` except flushes output afterwards.
2906 Behaves like ``raw`` except flushes output afterwards.
2909
2907
2910 command <X>
2908 command <X>
2911 -----------
2909 -----------
2912
2910
2913 Send a request to run a named command, whose name follows the ``command``
2911 Send a request to run a named command, whose name follows the ``command``
2914 string.
2912 string.
2915
2913
2916 Arguments to the command are defined as lines in this block. The format of
2914 Arguments to the command are defined as lines in this block. The format of
2917 each line is ``<key> <value>``. e.g.::
2915 each line is ``<key> <value>``. e.g.::
2918
2916
2919 command listkeys
2917 command listkeys
2920 namespace bookmarks
2918 namespace bookmarks
2921
2919
2922 If the value begins with ``eval:``, it will be interpreted as a Python
2920 If the value begins with ``eval:``, it will be interpreted as a Python
2923 literal expression. Otherwise values are interpreted as Python b'' literals.
2921 literal expression. Otherwise values are interpreted as Python b'' literals.
2924 This allows sending complex types and encoding special byte sequences via
2922 This allows sending complex types and encoding special byte sequences via
2925 backslash escaping.
2923 backslash escaping.
2926
2924
2927 The following arguments have special meaning:
2925 The following arguments have special meaning:
2928
2926
2929 ``PUSHFILE``
2927 ``PUSHFILE``
2930 When defined, the *push* mechanism of the peer will be used instead
2928 When defined, the *push* mechanism of the peer will be used instead
2931 of the static request-response mechanism and the content of the
2929 of the static request-response mechanism and the content of the
2932 file specified in the value of this argument will be sent as the
2930 file specified in the value of this argument will be sent as the
2933 command payload.
2931 command payload.
2934
2932
2935 This can be used to submit a local bundle file to the remote.
2933 This can be used to submit a local bundle file to the remote.
2936
2934
2937 batchbegin
2935 batchbegin
2938 ----------
2936 ----------
2939
2937
2940 Instruct the peer to begin a batched send.
2938 Instruct the peer to begin a batched send.
2941
2939
2942 All ``command`` blocks are queued for execution until the next
2940 All ``command`` blocks are queued for execution until the next
2943 ``batchsubmit`` block.
2941 ``batchsubmit`` block.
2944
2942
2945 batchsubmit
2943 batchsubmit
2946 -----------
2944 -----------
2947
2945
2948 Submit previously queued ``command`` blocks as a batch request.
2946 Submit previously queued ``command`` blocks as a batch request.
2949
2947
2950 This action MUST be paired with a ``batchbegin`` action.
2948 This action MUST be paired with a ``batchbegin`` action.
2951
2949
2952 httprequest <method> <path>
2950 httprequest <method> <path>
2953 ---------------------------
2951 ---------------------------
2954
2952
2955 (HTTP peer only)
2953 (HTTP peer only)
2956
2954
2957 Send an HTTP request to the peer.
2955 Send an HTTP request to the peer.
2958
2956
2959 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
2957 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
2960
2958
2961 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
2959 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
2962 headers to add to the request. e.g. ``Accept: foo``.
2960 headers to add to the request. e.g. ``Accept: foo``.
2963
2961
2964 The following arguments are special:
2962 The following arguments are special:
2965
2963
2966 ``BODYFILE``
2964 ``BODYFILE``
2967 The content of the file defined as the value to this argument will be
2965 The content of the file defined as the value to this argument will be
2968 transferred verbatim as the HTTP request body.
2966 transferred verbatim as the HTTP request body.
2969
2967
2970 ``frame <type> <flags> <payload>``
2968 ``frame <type> <flags> <payload>``
2971 Send a unified protocol frame as part of the request body.
2969 Send a unified protocol frame as part of the request body.
2972
2970
2973 All frames will be collected and sent as the body to the HTTP
2971 All frames will be collected and sent as the body to the HTTP
2974 request.
2972 request.
2975
2973
2976 close
2974 close
2977 -----
2975 -----
2978
2976
2979 Close the connection to the server.
2977 Close the connection to the server.
2980
2978
2981 flush
2979 flush
2982 -----
2980 -----
2983
2981
2984 Flush data written to the server.
2982 Flush data written to the server.
2985
2983
2986 readavailable
2984 readavailable
2987 -------------
2985 -------------
2988
2986
2989 Close the write end of the connection and read all available data from
2987 Close the write end of the connection and read all available data from
2990 the server.
2988 the server.
2991
2989
2992 If the connection to the server encompasses multiple pipes, we poll both
2990 If the connection to the server encompasses multiple pipes, we poll both
2993 pipes and read available data.
2991 pipes and read available data.
2994
2992
2995 readline
2993 readline
2996 --------
2994 --------
2997
2995
2998 Read a line of output from the server. If there are multiple output
2996 Read a line of output from the server. If there are multiple output
2999 pipes, reads only the main pipe.
2997 pipes, reads only the main pipe.
3000
2998
3001 ereadline
2999 ereadline
3002 ---------
3000 ---------
3003
3001
3004 Like ``readline``, but read from the stderr pipe, if available.
3002 Like ``readline``, but read from the stderr pipe, if available.
3005
3003
3006 read <X>
3004 read <X>
3007 --------
3005 --------
3008
3006
3009 ``read()`` X bytes from the server's main output pipe.
3007 ``read()`` X bytes from the server's main output pipe.
3010
3008
3011 eread <X>
3009 eread <X>
3012 ---------
3010 ---------
3013
3011
3014 ``read()`` X bytes from the server's stderr pipe, if available.
3012 ``read()`` X bytes from the server's stderr pipe, if available.
3015
3013
3016 Specifying Unified Frame-Based Protocol Frames
3014 Specifying Unified Frame-Based Protocol Frames
3017 ----------------------------------------------
3015 ----------------------------------------------
3018
3016
3019 It is possible to emit *Unified Frame-Based Protocol* frames by using
3017 It is possible to emit *Unified Frame-Based Protocol* frames by using
3020 special syntax.
3018 special syntax.
3021
3019
3022 A frame is composed of a type, flags, and a payload. These can be parsed
3020 A frame is composed of a type, flags, and a payload. These can be parsed
3023 from a string of the form:
3021 from a string of the form:
3024
3022
3025 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
3023 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
3026
3024
3027 ``request-id`` and ``stream-id`` are integers defining the request and
3025 ``request-id`` and ``stream-id`` are integers defining the request and
3028 stream identifiers.
3026 stream identifiers.
3029
3027
3030 ``type`` can be an integer value for the frame type or the string name
3028 ``type`` can be an integer value for the frame type or the string name
3031 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
3029 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
3032 ``command-name``.
3030 ``command-name``.
3033
3031
3034 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
3032 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
3035 components. Each component (and there can be just one) can be an integer
3033 components. Each component (and there can be just one) can be an integer
3036 or a flag name for stream flags or frame flags, respectively. Values are
3034 or a flag name for stream flags or frame flags, respectively. Values are
3037 resolved to integers and then bitwise OR'd together.
3035 resolved to integers and then bitwise OR'd together.
3038
3036
3039 ``payload`` represents the raw frame payload. If it begins with
3037 ``payload`` represents the raw frame payload. If it begins with
3040 ``cbor:``, the following string is evaluated as Python code and the
3038 ``cbor:``, the following string is evaluated as Python code and the
3041 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3039 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3042 as a Python byte string literal.
3040 as a Python byte string literal.
3043 """
3041 """
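# An illustrative script for the mini language described above (the command
# names are merely examples of existing wire protocol commands; any of the
# documented actions may be used):
#
#   command listkeys
#       namespace bookmarks
#   batchbegin
#   command heads
#   command listkeys
#       namespace phases
#   batchsubmit
#   close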
3044 opts = pycompat.byteskwargs(opts)
3042 opts = pycompat.byteskwargs(opts)
3045
3043
3046 if opts['localssh'] and not repo:
3044 if opts['localssh'] and not repo:
3047 raise error.Abort(_('--localssh requires a repository'))
3045 raise error.Abort(_('--localssh requires a repository'))
3048
3046
3049 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
3047 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
3050 raise error.Abort(_('invalid value for --peer'),
3048 raise error.Abort(_('invalid value for --peer'),
3051 hint=_('valid values are "raw", "http2", "ssh1", and "ssh2"'))
3049 hint=_('valid values are "raw", "http2", "ssh1", and "ssh2"'))
3052
3050
3053 if path and opts['localssh']:
3051 if path and opts['localssh']:
3054 raise error.Abort(_('cannot specify --localssh with an explicit '
3052 raise error.Abort(_('cannot specify --localssh with an explicit '
3055 'path'))
3053 'path'))
3056
3054
3057 if ui.interactive():
3055 if ui.interactive():
3058 ui.write(_('(waiting for commands on stdin)\n'))
3056 ui.write(_('(waiting for commands on stdin)\n'))
3059
3057
3060 blocks = list(_parsewirelangblocks(ui.fin))
3058 blocks = list(_parsewirelangblocks(ui.fin))
3061
3059
3062 proc = None
3060 proc = None
3063 stdin = None
3061 stdin = None
3064 stdout = None
3062 stdout = None
3065 stderr = None
3063 stderr = None
3066 opener = None
3064 opener = None
3067
3065
3068 if opts['localssh']:
3066 if opts['localssh']:
3069 # We start the SSH server in its own process so there is process
3067 # We start the SSH server in its own process so there is process
3070 # separation. This prevents a whole class of potential bugs around
3068 # separation. This prevents a whole class of potential bugs around
3071 # shared state from interfering with server operation.
3069 # shared state from interfering with server operation.
3072 args = procutil.hgcmd() + [
3070 args = procutil.hgcmd() + [
3073 '-R', repo.root,
3071 '-R', repo.root,
3074 'debugserve', '--sshstdio',
3072 'debugserve', '--sshstdio',
3075 ]
3073 ]
3076 proc = subprocess.Popen(args, stdin=subprocess.PIPE,
3074 proc = subprocess.Popen(args, stdin=subprocess.PIPE,
3077 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
3075 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
3078 bufsize=0)
3076 bufsize=0)
3079
3077
3080 stdin = proc.stdin
3078 stdin = proc.stdin
3081 stdout = proc.stdout
3079 stdout = proc.stdout
3082 stderr = proc.stderr
3080 stderr = proc.stderr
3083
3081
3084 # We turn the pipes into observers so we can log I/O.
3082 # We turn the pipes into observers so we can log I/O.
3085 if ui.verbose or opts['peer'] == 'raw':
3083 if ui.verbose or opts['peer'] == 'raw':
3086 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
3084 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
3087 logdata=True)
3085 logdata=True)
3088 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
3086 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
3089 logdata=True)
3087 logdata=True)
3090 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
3088 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
3091 logdata=True)
3089 logdata=True)
3092
3090
3093 # --localssh also implies the peer connection settings.
3091 # --localssh also implies the peer connection settings.
3094
3092
3095 url = 'ssh://localserver'
3093 url = 'ssh://localserver'
3096 autoreadstderr = not opts['noreadstderr']
3094 autoreadstderr = not opts['noreadstderr']
3097
3095
3098 if opts['peer'] == 'ssh1':
3096 if opts['peer'] == 'ssh1':
3099 ui.write(_('creating ssh peer for wire protocol version 1\n'))
3097 ui.write(_('creating ssh peer for wire protocol version 1\n'))
3100 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
3098 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
3101 None, autoreadstderr=autoreadstderr)
3099 None, autoreadstderr=autoreadstderr)
3102 elif opts['peer'] == 'ssh2':
3100 elif opts['peer'] == 'ssh2':
3103 ui.write(_('creating ssh peer for wire protocol version 2\n'))
3101 ui.write(_('creating ssh peer for wire protocol version 2\n'))
3104 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
3102 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
3105 None, autoreadstderr=autoreadstderr)
3103 None, autoreadstderr=autoreadstderr)
3106 elif opts['peer'] == 'raw':
3104 elif opts['peer'] == 'raw':
3107 ui.write(_('using raw connection to peer\n'))
3105 ui.write(_('using raw connection to peer\n'))
3108 peer = None
3106 peer = None
3109 else:
3107 else:
3110 ui.write(_('creating ssh peer from handshake results\n'))
3108 ui.write(_('creating ssh peer from handshake results\n'))
3111 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
3109 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
3112 autoreadstderr=autoreadstderr)
3110 autoreadstderr=autoreadstderr)
3113
3111
3114 elif path:
3112 elif path:
3115 # We bypass hg.peer() so we can proxy the sockets.
3113 # We bypass hg.peer() so we can proxy the sockets.
3116 # TODO consider not doing this because we skip
3114 # TODO consider not doing this because we skip
3117 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
3115 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
3118 u = util.url(path)
3116 u = util.url(path)
3119 if u.scheme != 'http':
3117 if u.scheme != 'http':
3120 raise error.Abort(_('only http:// paths are currently supported'))
3118 raise error.Abort(_('only http:// paths are currently supported'))
3121
3119
3122 url, authinfo = u.authinfo()
3120 url, authinfo = u.authinfo()
3123 openerargs = {
3121 openerargs = {
3124 r'useragent': b'Mercurial debugwireproto',
3122 r'useragent': b'Mercurial debugwireproto',
3125 }
3123 }
3126
3124
3127 # Turn pipes/sockets into observers so we can log I/O.
3125 # Turn pipes/sockets into observers so we can log I/O.
3128 if ui.verbose:
3126 if ui.verbose:
3129 openerargs.update({
3127 openerargs.update({
3130 r'loggingfh': ui,
3128 r'loggingfh': ui,
3131 r'loggingname': b's',
3129 r'loggingname': b's',
3132 r'loggingopts': {
3130 r'loggingopts': {
3133 r'logdata': True,
3131 r'logdata': True,
3134 r'logdataapis': False,
3132 r'logdataapis': False,
3135 },
3133 },
3136 })
3134 })
3137
3135
3138 if ui.debugflag:
3136 if ui.debugflag:
3139 openerargs[r'loggingopts'][r'logdataapis'] = True
3137 openerargs[r'loggingopts'][r'logdataapis'] = True
3140
3138
3141 # Don't send default headers when in raw mode. This allows us to
3139 # Don't send default headers when in raw mode. This allows us to
3142 # bypass most of the behavior of our URL handling code so we can
3140 # bypass most of the behavior of our URL handling code so we can
3143 # have near complete control over what's sent on the wire.
3141 # have near complete control over what's sent on the wire.
3144 if opts['peer'] == 'raw':
3142 if opts['peer'] == 'raw':
3145 openerargs[r'sendaccept'] = False
3143 openerargs[r'sendaccept'] = False
3146
3144
3147 opener = urlmod.opener(ui, authinfo, **openerargs)
3145 opener = urlmod.opener(ui, authinfo, **openerargs)
3148
3146
3149 if opts['peer'] == 'http2':
3147 if opts['peer'] == 'http2':
3150 ui.write(_('creating http peer for wire protocol version 2\n'))
3148 ui.write(_('creating http peer for wire protocol version 2\n'))
3151 # We go through makepeer() because we need an API descriptor for
3149 # We go through makepeer() because we need an API descriptor for
3152 # the peer instance to be useful.
3150 # the peer instance to be useful.
3153 with ui.configoverride({
3151 with ui.configoverride({
3154 ('experimental', 'httppeer.advertise-v2'): True}):
3152 ('experimental', 'httppeer.advertise-v2'): True}):
3155 if opts['nologhandshake']:
3153 if opts['nologhandshake']:
3156 ui.pushbuffer()
3154 ui.pushbuffer()
3157
3155
3158 peer = httppeer.makepeer(ui, path, opener=opener)
3156 peer = httppeer.makepeer(ui, path, opener=opener)
3159
3157
3160 if opts['nologhandshake']:
3158 if opts['nologhandshake']:
3161 ui.popbuffer()
3159 ui.popbuffer()
3162
3160
3163 if not isinstance(peer, httppeer.httpv2peer):
3161 if not isinstance(peer, httppeer.httpv2peer):
3164 raise error.Abort(_('could not instantiate HTTP peer for '
3162 raise error.Abort(_('could not instantiate HTTP peer for '
3165 'wire protocol version 2'),
3163 'wire protocol version 2'),
3166 hint=_('the server may not have the feature '
3164 hint=_('the server may not have the feature '
3167 'enabled or is not allowing this '
3165 'enabled or is not allowing this '
3168 'client version'))
3166 'client version'))
3169
3167
3170 elif opts['peer'] == 'raw':
3168 elif opts['peer'] == 'raw':
3171 ui.write(_('using raw connection to peer\n'))
3169 ui.write(_('using raw connection to peer\n'))
3172 peer = None
3170 peer = None
3173 elif opts['peer']:
3171 elif opts['peer']:
3174 raise error.Abort(_('--peer %s not supported with HTTP peers') %
3172 raise error.Abort(_('--peer %s not supported with HTTP peers') %
3175 opts['peer'])
3173 opts['peer'])
3176 else:
3174 else:
3177 peer = httppeer.makepeer(ui, path, opener=opener)
3175 peer = httppeer.makepeer(ui, path, opener=opener)
3178
3176
3179 # We /could/ populate stdin/stdout with sock.makefile()...
3177 # We /could/ populate stdin/stdout with sock.makefile()...
3180 else:
3178 else:
3181 raise error.Abort(_('unsupported connection configuration'))
3179 raise error.Abort(_('unsupported connection configuration'))
3182
3180
3183 batchedcommands = None
3181 batchedcommands = None
3184
3182
3185 # Now perform actions based on the parsed wire language instructions.
3183 # Now perform actions based on the parsed wire language instructions.
3186 for action, lines in blocks:
3184 for action, lines in blocks:
3187 if action in ('raw', 'raw+'):
3185 if action in ('raw', 'raw+'):
3188 if not stdin:
3186 if not stdin:
3189 raise error.Abort(_('cannot call raw/raw+ on this peer'))
3187 raise error.Abort(_('cannot call raw/raw+ on this peer'))
3190
3188
3191 # Concatenate the data together.
3189 # Concatenate the data together.
3192 data = ''.join(l.lstrip() for l in lines)
3190 data = ''.join(l.lstrip() for l in lines)
3193 data = stringutil.unescapestr(data)
3191 data = stringutil.unescapestr(data)
3194 stdin.write(data)
3192 stdin.write(data)
3195
3193
3196 if action == 'raw+':
3194 if action == 'raw+':
3197 stdin.flush()
3195 stdin.flush()
3198 elif action == 'flush':
3196 elif action == 'flush':
3199 if not stdin:
3197 if not stdin:
3200 raise error.Abort(_('cannot call flush on this peer'))
3198 raise error.Abort(_('cannot call flush on this peer'))
3201 stdin.flush()
3199 stdin.flush()
3202 elif action.startswith('command'):
3200 elif action.startswith('command'):
3203 if not peer:
3201 if not peer:
3204 raise error.Abort(_('cannot send commands unless peer instance '
3202 raise error.Abort(_('cannot send commands unless peer instance '
3205 'is available'))
3203 'is available'))
3206
3204
3207 command = action.split(' ', 1)[1]
3205 command = action.split(' ', 1)[1]
3208
3206
3209 args = {}
3207 args = {}
3210 for line in lines:
3208 for line in lines:
3211 # We need to allow empty values.
3209 # We need to allow empty values.
3212 fields = line.lstrip().split(' ', 1)
3210 fields = line.lstrip().split(' ', 1)
3213 if len(fields) == 1:
3211 if len(fields) == 1:
3214 key = fields[0]
3212 key = fields[0]
3215 value = ''
3213 value = ''
3216 else:
3214 else:
3217 key, value = fields
3215 key, value = fields
3218
3216
3219 if value.startswith('eval:'):
3217 if value.startswith('eval:'):
3220 value = stringutil.evalpythonliteral(value[5:])
3218 value = stringutil.evalpythonliteral(value[5:])
3221 else:
3219 else:
3222 value = stringutil.unescapestr(value)
3220 value = stringutil.unescapestr(value)
3223
3221
3224 args[key] = value
3222 args[key] = value
3225
3223
3226 if batchedcommands is not None:
3224 if batchedcommands is not None:
3227 batchedcommands.append((command, args))
3225 batchedcommands.append((command, args))
3228 continue
3226 continue
3229
3227
3230 ui.status(_('sending %s command\n') % command)
3228 ui.status(_('sending %s command\n') % command)
3231
3229
3232 if 'PUSHFILE' in args:
3230 if 'PUSHFILE' in args:
3233 with open(args['PUSHFILE'], r'rb') as fh:
3231 with open(args['PUSHFILE'], r'rb') as fh:
3234 del args['PUSHFILE']
3232 del args['PUSHFILE']
3235 res, output = peer._callpush(command, fh,
3233 res, output = peer._callpush(command, fh,
3236 **pycompat.strkwargs(args))
3234 **pycompat.strkwargs(args))
3237 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3235 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3238 ui.status(_('remote output: %s\n') %
3236 ui.status(_('remote output: %s\n') %
3239 stringutil.escapestr(output))
3237 stringutil.escapestr(output))
3240 else:
3238 else:
3241 with peer.commandexecutor() as e:
3239 with peer.commandexecutor() as e:
3242 res = e.callcommand(command, args).result()
3240 res = e.callcommand(command, args).result()
3243
3241
3244 if isinstance(res, wireprotov2peer.commandresponse):
3242 if isinstance(res, wireprotov2peer.commandresponse):
3245 val = list(res.cborobjects())
3243 val = list(res.cborobjects())
3246 ui.status(_('response: %s\n') %
3244 ui.status(_('response: %s\n') %
3247 stringutil.pprint(val, bprefix=True, indent=2))
3245 stringutil.pprint(val, bprefix=True, indent=2))
3248 else:
3246 else:
3249 ui.status(_('response: %s\n') %
3247 ui.status(_('response: %s\n') %
3250 stringutil.pprint(res, bprefix=True, indent=2))
3248 stringutil.pprint(res, bprefix=True, indent=2))
3251
3249
3252 elif action == 'batchbegin':
3250 elif action == 'batchbegin':
3253 if batchedcommands is not None:
3251 if batchedcommands is not None:
3254 raise error.Abort(_('nested batchbegin not allowed'))
3252 raise error.Abort(_('nested batchbegin not allowed'))
3255
3253
3256 batchedcommands = []
3254 batchedcommands = []
3257 elif action == 'batchsubmit':
3255 elif action == 'batchsubmit':
3258 # There is a batching API we could go through. But it would be
3256 # There is a batching API we could go through. But it would be
3259 # difficult to normalize requests into function calls. It is easier
3257 # difficult to normalize requests into function calls. It is easier
3260 # to bypass this layer and normalize to commands + args.
3258 # to bypass this layer and normalize to commands + args.
3261 ui.status(_('sending batch with %d sub-commands\n') %
3259 ui.status(_('sending batch with %d sub-commands\n') %
3262 len(batchedcommands))
3260 len(batchedcommands))
3263 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3261 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3264 ui.status(_('response #%d: %s\n') %
3262 ui.status(_('response #%d: %s\n') %
3265 (i, stringutil.escapestr(chunk)))
3263 (i, stringutil.escapestr(chunk)))
3266
3264
3267 batchedcommands = None
3265 batchedcommands = None
3268
3266
3269 elif action.startswith('httprequest '):
3267 elif action.startswith('httprequest '):
3270 if not opener:
3268 if not opener:
3271 raise error.Abort(_('cannot use httprequest without an HTTP '
3269 raise error.Abort(_('cannot use httprequest without an HTTP '
3272 'peer'))
3270 'peer'))
3273
3271
3274 request = action.split(' ', 2)
3272 request = action.split(' ', 2)
3275 if len(request) != 3:
3273 if len(request) != 3:
3276 raise error.Abort(_('invalid httprequest: expected format is '
3274 raise error.Abort(_('invalid httprequest: expected format is '
3277 '"httprequest <method> <path>"'))
3275 '"httprequest <method> <path>"'))
3278
3276
3279 method, httppath = request[1:]
3277 method, httppath = request[1:]
3280 headers = {}
3278 headers = {}
3281 body = None
3279 body = None
3282 frames = []
3280 frames = []
3283 for line in lines:
3281 for line in lines:
3284 line = line.lstrip()
3282 line = line.lstrip()
3285 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3283 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3286 if m:
3284 if m:
3287 headers[m.group(1)] = m.group(2)
3285 headers[m.group(1)] = m.group(2)
3288 continue
3286 continue
3289
3287
3290 if line.startswith(b'BODYFILE '):
3288 if line.startswith(b'BODYFILE '):
3291 with open(line.split(b' ', 1)[1], 'rb') as fh:
3289 with open(line.split(b' ', 1)[1], 'rb') as fh:
3292 body = fh.read()
3290 body = fh.read()
3293 elif line.startswith(b'frame '):
3291 elif line.startswith(b'frame '):
3294 frame = wireprotoframing.makeframefromhumanstring(
3292 frame = wireprotoframing.makeframefromhumanstring(
3295 line[len(b'frame '):])
3293 line[len(b'frame '):])
3296
3294
3297 frames.append(frame)
3295 frames.append(frame)
3298 else:
3296 else:
3299 raise error.Abort(_('unknown argument to httprequest: %s') %
3297 raise error.Abort(_('unknown argument to httprequest: %s') %
3300 line)
3298 line)
3301
3299
3302 url = path + httppath
3300 url = path + httppath
3303
3301
3304 if frames:
3302 if frames:
3305 body = b''.join(bytes(f) for f in frames)
3303 body = b''.join(bytes(f) for f in frames)
3306
3304
3307 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3305 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3308
3306
3309 # urllib.Request insists on using has_data() as a proxy for
3307 # urllib.Request insists on using has_data() as a proxy for
3310 # determining the request method. Override that to use our
3308 # determining the request method. Override that to use our
3311 # explicitly requested method.
3309 # explicitly requested method.
3312 req.get_method = lambda: pycompat.sysstr(method)
3310 req.get_method = lambda: pycompat.sysstr(method)
3313
3311
3314 try:
3312 try:
3315 res = opener.open(req)
3313 res = opener.open(req)
3316 body = res.read()
3314 body = res.read()
3317 except util.urlerr.urlerror as e:
3315 except util.urlerr.urlerror as e:
3318 # read() method must be called, but only exists in Python 2
3316 # read() method must be called, but only exists in Python 2
3319 getattr(e, 'read', lambda: None)()
3317 getattr(e, 'read', lambda: None)()
3320 continue
3318 continue
3321
3319
3322 if res.headers.get('Content-Type') == 'application/mercurial-cbor':
3320 if res.headers.get('Content-Type') == 'application/mercurial-cbor':
3323 ui.write(_('cbor> %s\n') %
3321 ui.write(_('cbor> %s\n') %
3324 stringutil.pprint(cbor.loads(body), bprefix=True,
3322 stringutil.pprint(cborutil.decodeall(body)[0],
3323 bprefix=True,
3325 indent=2))
3324 indent=2))
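# Note on the change above: cborutil.decodeall() decodes every top-level CBOR
# value in the buffer and returns them as a list (raising if the input is
# incomplete), so the [0] selects the single response object that the
# third-party cbor.loads() call used to return.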
3326
3325
3327 elif action == 'close':
3326 elif action == 'close':
3328 peer.close()
3327 peer.close()
3329 elif action == 'readavailable':
3328 elif action == 'readavailable':
3330 if not stdout or not stderr:
3329 if not stdout or not stderr:
3331 raise error.Abort(_('readavailable not available on this peer'))
3330 raise error.Abort(_('readavailable not available on this peer'))
3332
3331
3333 stdin.close()
3332 stdin.close()
3334 stdout.read()
3333 stdout.read()
3335 stderr.read()
3334 stderr.read()
3336
3335
3337 elif action == 'readline':
3336 elif action == 'readline':
3338 if not stdout:
3337 if not stdout:
3339 raise error.Abort(_('readline not available on this peer'))
3338 raise error.Abort(_('readline not available on this peer'))
3340 stdout.readline()
3339 stdout.readline()
3341 elif action == 'ereadline':
3340 elif action == 'ereadline':
3342 if not stderr:
3341 if not stderr:
3343 raise error.Abort(_('ereadline not available on this peer'))
3342 raise error.Abort(_('ereadline not available on this peer'))
3344 stderr.readline()
3343 stderr.readline()
3345 elif action.startswith('read '):
3344 elif action.startswith('read '):
3346 count = int(action.split(' ', 1)[1])
3345 count = int(action.split(' ', 1)[1])
3347 if not stdout:
3346 if not stdout:
3348 raise error.Abort(_('read not available on this peer'))
3347 raise error.Abort(_('read not available on this peer'))
3349 stdout.read(count)
3348 stdout.read(count)
3350 elif action.startswith('eread '):
3349 elif action.startswith('eread '):
3351 count = int(action.split(' ', 1)[1])
3350 count = int(action.split(' ', 1)[1])
3352 if not stderr:
3351 if not stderr:
3353 raise error.Abort(_('eread not available on this peer'))
3352 raise error.Abort(_('eread not available on this peer'))
3354 stderr.read(count)
3353 stderr.read(count)
3355 else:
3354 else:
3356 raise error.Abort(_('unknown action: %s') % action)
3355 raise error.Abort(_('unknown action: %s') % action)
3357
3356
3358 if batchedcommands is not None:
3357 if batchedcommands is not None:
3359 raise error.Abort(_('unclosed "batchbegin" request'))
3358 raise error.Abort(_('unclosed "batchbegin" request'))
3360
3359
3361 if peer:
3360 if peer:
3362 peer.close()
3361 peer.close()
3363
3362
3364 if proc:
3363 if proc:
3365 proc.kill()
3364 proc.kill()