py3: mask out None type when printing in `debuglocks`...
Matt Harbison
r39920:9c8eff5c default
@@ -1,3368 +1,3368 @@
# debugcommands.py - command processing for debug* commands
#
# Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import codecs
import collections
import difflib
import errno
import operator
import os
import random
import re
import socket
import ssl
import stat
import string
import subprocess
import sys
import time

from .i18n import _
from .node import (
    bin,
    hex,
    nullhex,
    nullid,
    nullrev,
    short,
)
from . import (
    bundle2,
    changegroup,
    cmdutil,
    color,
    context,
    dagparser,
    encoding,
    error,
    exchange,
    extensions,
    filemerge,
    filesetlang,
    formatter,
    hg,
    httppeer,
    localrepo,
    lock as lockmod,
    logcmdutil,
    merge as mergemod,
    obsolete,
    obsutil,
    phases,
    policy,
    pvec,
    pycompat,
    registrar,
    repair,
    revlog,
    revset,
    revsetlang,
    scmutil,
    setdiscovery,
    simplemerge,
    sshpeer,
    sslutil,
    streamclone,
    templater,
    treediscovery,
    upgrade,
    url as urlmod,
    util,
    vfs as vfsmod,
    wireprotoframing,
    wireprotoserver,
    wireprotov2peer,
)
from .utils import (
    cborutil,
    dateutil,
    procutil,
    stringutil,
)

from .revlogutils import (
    deltas as deltautil
)

release = lockmod.release

command = registrar.command()

@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    if len(args) == 3:
        index, rev1, rev2 = args
        r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = r.lookup
    elif len(args) == 2:
        if not repo:
            raise error.Abort(_('there is no Mercurial repository here '
                                '(.hg not found)'))
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_('either two or three arguments required'))
    a = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write('%d:%s\n' % (r.rev(a), hex(a)))

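# Illustrative usage sketch (assumes a repository with at least a few
# revisions; not taken from the changeset itself):
#
#   $ hg debugancestor 1 3
#
# prints a common ancestor of revisions 1 and 3, using the changelog of the
# current repository, formatted as "<rev>:<full hex node>".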
@command('debugapplystreamclonebundle', [], 'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    f = hg.openpath(ui, fname)
    gen = exchange.readbundle(ui, f, fname)
    gen.apply(repo)

@command('debugbuilddag',
    [('m', 'mergeable-file', None, _('add single file mergeable changes')),
    ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
    ('n', 'new-file', None, _('add new file at each rev'))],
    _('[OPTION]... [TEXT]'))
def debugbuilddag(ui, repo, text=None,
                  mergeable_file=False,
                  overwritten_file=False,
                  new_file=False):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string-valued elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """
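    # Illustrative DAG text (a sketch built from the grammar documented above,
    # run inside a freshly created empty repository; any equivalent
    # description works):
    #
    #   $ hg debugbuilddag '+3:mainline $ +2 /mainline @release +1'
    #
    # builds a three-node run whose tip is tagged "mainline", starts a new
    # two-node root line, merges it back with "mainline", and adds one more
    # node on the named branch "release".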

    if text is None:
        ui.status(_("reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_('repository is not empty'))

    # determine number of revs in DAG
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == 'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = ['%d' % i
                              for i in pycompat.xrange(0, total * linesperrev)]
        initialmergedlines.append("")

    tags = []
    progress = ui.makeprogress(_('building'), unit=_('revisions'),
                               total=total)
    with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
        at = -1
        atbranch = 'default'
        nodeids = []
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == 'n':
                ui.note(('node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = "mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [x[fn].data() for x in (pa, p1,
                                                                     p2)]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append("")
                    elif at > 0:
                        ml = p1[fn].data().split("\n")
                    else:
                        ml = initialmergedlines
                    ml[id * linesperrev] += " r%i" % id
                    mergedtext = "\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = "of"
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id

                if new_file:
                    fn = "nf%i" % id
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith("nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(repo, cx, path,
                                                  filecontent[path])
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
                                    date=(id, 0),
                                    user="debugbuilddag",
                                    extra={'branch': atbranch})
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == 'l':
                id, name = data
                ui.note(('tag %s\n' % name))
                tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == 'a':
                ui.note(('branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write("localtags", "".join(tags))

def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    indent_string = ' ' * indent
    if all:
        ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
                 % indent_string)

        def showchunks(named):
            ui.write("\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write("%s%s %s %s %s %s %d\n" %
                         (indent_string, hex(node), hex(p1), hex(p2),
                          hex(cs), hex(deltabase), len(delta)))

        chunkdata = gen.changelogheader()
        showchunks("changelog")
        chunkdata = gen.manifestheader()
        showchunks("manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata['filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_('use debugbundle2 for this file'))
        chunkdata = gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write("%s%s\n" % (indent_string, hex(node)))

def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = ' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        msg = "%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = "%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        fm = ui.formatter('debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()

def _debugphaseheads(ui, data, indent=0):
    """display the phase heads contained in 'data'"""
    indent_string = ' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(indent_string)
            ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))

def _quasirepr(thing):
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        return '{%s}' % (
            b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
    return pycompat.bytestr(repr(thing))

def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_('not a bundle2 file'))
    ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
    parttypes = opts.get(r'part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = '%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        if part.type == 'changegroup':
            version = part.params.get('version', '01')
            cg = changegroup.getunbundler(version, part, 'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == 'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == 'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)

@command('debugbundle',
        [('a', 'all', None, _('show all details')),
         ('', 'part-type', [], _('show only the named part type')),
         ('', 'spec', None, _('print the bundlespec of the bundle'))],
        _('FILE'),
        norepo=True)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            spec = exchange.getbundlespec(ui, f)
            ui.write('%s\n' % spec)
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)

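# Illustrative invocations (the flags come from the command table above;
# "changes.hg" is a hypothetical file produced by `hg bundle`):
#
#   $ hg debugbundle changes.hg          # one line per changeset node
#   $ hg debugbundle --all changes.hg    # per-revlog delta details
#   $ hg debugbundle --spec changes.hg   # print only the bundlespec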
@command('debugcapabilities',
        [], _('PATH'),
        norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    caps = peer.capabilities()
    ui.write(('Main capabilities:\n'))
    for c in sorted(caps):
        ui.write(('  %s\n') % c)
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.write(('Bundle2 capabilities:\n'))
        for key, values in sorted(b2caps.iteritems()):
            ui.write(('  %s\n') % key)
            for v in values:
                ui.write(('    %s\n') % v)

@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        errstr = _(".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)

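# For reference when reading the checks above: dirstate entries use the state
# letters 'n' (normal), 'a' (added), 'r' (removed) and 'm' (merged, i.e.
# coming from a two-parent merge).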
@command('debugcolor',
        [('', 'style', None, _('show all configured styles'))],
        'hg debugcolor')
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
    if opts.get(r'style'):
        return _debugdisplaystyle(ui)
    else:
        return _debugdisplaycolor(ui)

def _debugdisplaycolor(ui):
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for k, v in ui.configitems('color'):
            if k.startswith('color.'):
                ui._styles[k] = k[6:]
            elif k.startswith('terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_('available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(),
                   key=lambda i: ('_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(('%s\n') % colorname, label=label)

def _debugdisplaystyle(ui):
    ui.write(_('available style:\n'))
    if not ui._styles:
        return
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write('%s' % label, label=label)
        if effects:
            # 50
            ui.write(': ')
            ui.write(' ' * (max(0, width - len(label))))
            ui.write(', '.join(ui.label(e, e) for e in effects.split()))
        ui.write('\n')

@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(_('(warning: stream clone bundle will contain secret '
                  'revisions)\n'))

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))

@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
    ('b', 'branches', None, _('annotate with branch names')),
    ('', 'dots', None, _('use dots for runs')),
    ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get(r'spaces')
    dots = opts.get(r'dots')
    if file_:
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False),
                             file_)
        revs = set((int(r) for r in revs))
        def events():
            for r in rlog:
                yield 'n', (r, list(p for p in rlog.parentrevs(r)
                                    if p != -1))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        cl = repo.changelog
        tags = opts.get(r'tags')
        branches = opts.get(r'branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            b = "default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(p for p in cl.parentrevs(r)
                                    if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
        ui.write("\n")

@command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    r = cmdutil.openstorage(repo, 'debugdata', file_, opts)
    try:
        ui.write(r.revision(r.lookup(rev), raw=True))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)

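# Illustrative invocations (the -c/-m flags come from cmdutil.debugrevlogopts;
# FILE and REV are placeholders):
#
#   $ hg debugdata -c 0        # raw changelog entry for revision 0
#   $ hg debugdata -m 0        # raw manifest for revision 0
#   $ hg debugdata FILE REV    # raw filelog data for FILE at REV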
@command('debugdate',
    [('e', 'extended', None, _('try extended date formats'))],
    _('[-e] DATE [RANGE]'),
    norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts[r"extended"]:
        d = dateutil.parsedate(date, util.extendeddateformats)
    else:
        d = dateutil.parsedate(date)
    ui.write(("internal: %d %d\n") % d)
    ui.write(("standard: %s\n") % dateutil.datestr(d))
    if range:
        m = dateutil.matchdate(range)
        ui.write(("match: %s\n") % m(d[0]))

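# Illustrative run (the exact output is a sketch that assumes a GMT/UTC
# environment, since dates without an explicit timezone are taken as local
# time):
#
#   $ hg debugdate '2006-02-01 13:00:30'
#   internal: 1138798830 0
#   standard: Wed Feb 01 13:00:30 2006 +0000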
@command('debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``:       revision number
    :``chainid``:   delta chain identifier (numbered by unique base)
    :``chainlen``:  delta chain length to this revision
    :``prevrev``:   previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``:  compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                    (new delta chains typically start at ratio 2.00)
    :``lindist``:   linear distance from base revision in delta chain to end
                    of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                    how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``:     total size of data read from the disk for a revision
                       (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``:  density of useful bytes in the data read from the disk
    :``srchunks``:     in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
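    # Illustrative templated invocation (the -T option comes from
    # cmdutil.formatteropts; the keywords are the ones documented above):
    #
    #   $ hg debugdeltachain -m -T '{rev} {chainid} {chainlen} {deltatype}\n'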
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            if e[3] == e[5]:
                deltatype = 'p1'
            elif e[3] == e[6]:
                deltatype = 'p2'
            elif e[3] == rev - 1:
                deltatype = 'prev'
            elif e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            if e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    fm.plain('    rev  chain# chainlen     prev   delta       '
             'size    rawsize  chainsize     ratio   lindist extradist '
             'extraratio')
    if withsparseread:
        fm.plain('   readsize largestblk rddensity srchunks')
    fm.plain('\n')

    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write('readsize largestblock readdensity srchunks',
                     ' %10d %10d %9.5f %8d',
                     readsize, largestblock, readdensity, srchunks,
                     readsize=readsize, largestblock=largestblock,
                     readdensity=readdensity, srchunks=srchunks)

        fm.plain('\n')

    fm.end()

@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime (DEPRECATED)')),
    ('', 'dates', True, _('display the saved mtime')),
    ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = not opts[r'dates']
    if opts.get(r'nodates') is not None:
        nodates = True
    datesort = opts.get(r'datesort')

    timestr = ""
    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        if ent[3] == -1:
            timestr = 'unset               '
        elif nodates:
            timestr = 'set                 '
        else:
            timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
                                    time.localtime(ent[3]))
            timestr = encoding.strtolocal(timestr)
        if ent[1] & 0o20000:
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))

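# For reference when reading the write format above ("%c %s %10d %s%s\n"):
# the columns are state letter, mode (or 'lnk' for symlinks), size, mtime and
# filename, producing lines shaped roughly like
#
#   n 644         14 2018-10-01 12:00:00 foo.txt
#
# (example values only).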
@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
    ('', 'nonheads', None,
     _('use old-style discovery with non-heads included')),
    ('', 'rev', [], 'restrict discovery to this set of revs'),
    ] + cmdutil.remoteopts,
    _('[--rev REV] [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(pushedrevs, remoteheads, remote=remote):
        if opts.get('old'):
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))

            clnode = repo.changelog.node
            common = repo.revs('heads(::%ln)', common)
            common = {clnode(r) for r in common}
        else:
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
                                                            ancestorsof=nodes)
        common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
        if lheads <= common:
            ui.write(("local is subset\n"))
        elif rheads <= common:
            ui.write(("remote is subset\n"))

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts['rev']
    doit(localrevs, remoterevs)

_chunksize = 4 << 10

@command('debugdownload',
    [
        ('o', 'output', '', _('path')),
    ],
    optionalrepo=True)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config
    """
    fh = urlmod.open(ui, url, output)

    dest = ui
    if output:
        dest = open(output, "wb", _chunksize)
    try:
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        if output:
            dest.close()

@command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter('debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = pycompat.fsencode(extmod.__file__)
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', '').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write('name', '%s\n', extname)
        else:
            fm.write('name', '%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain('\n')
            elif not exttestedwith:
                fm.plain(_(' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(' (%s!)\n' % lasttestedversion)

        fm.condwrite(ui.verbose and extsource, 'source',
                     _('  location: %s\n'), extsource or "")

        if ui.verbose:
            fm.plain(_('  bundled: %s\n') % ['no', 'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
                     _('  tested with: %s\n'),
                     fm.formatlist(exttestedwith, name='ver'))

        fm.condwrite(ui.verbose and extbuglink, 'buglink',
                     _('  bug reporting: %s\n'), extbuglink or "")

    fm.end()

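# Illustrative templated invocation (keywords correspond to the formatter
# fields written above: name, source, bundled, testedwith, buglink):
#
#   $ hg debugextensions -T '{name} (bundled: {bundled})\n'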
893 @command('debugfileset',
893 @command('debugfileset',
894 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
894 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
895 ('', 'all-files', False,
895 ('', 'all-files', False,
896 _('test files from all revisions and working directory')),
896 _('test files from all revisions and working directory')),
897 ('s', 'show-matcher', None,
897 ('s', 'show-matcher', None,
898 _('print internal representation of matcher')),
898 _('print internal representation of matcher')),
899 ('p', 'show-stage', [],
899 ('p', 'show-stage', [],
900 _('print parsed tree at the given stage'), _('NAME'))],
900 _('print parsed tree at the given stage'), _('NAME'))],
901 _('[-r REV] [--all-files] [OPTION]... FILESPEC'))
901 _('[-r REV] [--all-files] [OPTION]... FILESPEC'))
902 def debugfileset(ui, repo, expr, **opts):
902 def debugfileset(ui, repo, expr, **opts):
903 '''parse and apply a fileset specification'''
903 '''parse and apply a fileset specification'''
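# Usage sketch (hedged; 'added()' is only an illustrative fileset expression):
# the parse/analyze/optimize pipeline below can be inspected with
#   $ hg debugfileset --show-stage all 'added()'
# which prints each intermediate tree before listing the matching files.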
904 from . import fileset
904 from . import fileset
905 fileset.symbols # force import of fileset so we have predicates to optimize
905 fileset.symbols # force import of fileset so we have predicates to optimize
906 opts = pycompat.byteskwargs(opts)
906 opts = pycompat.byteskwargs(opts)
907 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
907 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
908
908
909 stages = [
909 stages = [
910 ('parsed', pycompat.identity),
910 ('parsed', pycompat.identity),
911 ('analyzed', filesetlang.analyze),
911 ('analyzed', filesetlang.analyze),
912 ('optimized', filesetlang.optimize),
912 ('optimized', filesetlang.optimize),
913 ]
913 ]
914 stagenames = set(n for n, f in stages)
914 stagenames = set(n for n, f in stages)
915
915
916 showalways = set()
916 showalways = set()
917 if ui.verbose and not opts['show_stage']:
917 if ui.verbose and not opts['show_stage']:
918 # show parsed tree by --verbose (deprecated)
918 # show parsed tree by --verbose (deprecated)
919 showalways.add('parsed')
919 showalways.add('parsed')
920 if opts['show_stage'] == ['all']:
920 if opts['show_stage'] == ['all']:
921 showalways.update(stagenames)
921 showalways.update(stagenames)
922 else:
922 else:
923 for n in opts['show_stage']:
923 for n in opts['show_stage']:
924 if n not in stagenames:
924 if n not in stagenames:
925 raise error.Abort(_('invalid stage name: %s') % n)
925 raise error.Abort(_('invalid stage name: %s') % n)
926 showalways.update(opts['show_stage'])
926 showalways.update(opts['show_stage'])
927
927
928 tree = filesetlang.parse(expr)
928 tree = filesetlang.parse(expr)
929 for n, f in stages:
929 for n, f in stages:
930 tree = f(tree)
930 tree = f(tree)
931 if n in showalways:
931 if n in showalways:
932 if opts['show_stage'] or n != 'parsed':
932 if opts['show_stage'] or n != 'parsed':
933 ui.write(("* %s:\n") % n)
933 ui.write(("* %s:\n") % n)
934 ui.write(filesetlang.prettyformat(tree), "\n")
934 ui.write(filesetlang.prettyformat(tree), "\n")
935
935
936 files = set()
936 files = set()
937 if opts['all_files']:
937 if opts['all_files']:
938 for r in repo:
938 for r in repo:
939 c = repo[r]
939 c = repo[r]
940 files.update(c.files())
940 files.update(c.files())
941 files.update(c.substate)
941 files.update(c.substate)
942 if opts['all_files'] or ctx.rev() is None:
942 if opts['all_files'] or ctx.rev() is None:
943 wctx = repo[None]
943 wctx = repo[None]
944 files.update(repo.dirstate.walk(scmutil.matchall(repo),
944 files.update(repo.dirstate.walk(scmutil.matchall(repo),
945 subrepos=list(wctx.substate),
945 subrepos=list(wctx.substate),
946 unknown=True, ignored=True))
946 unknown=True, ignored=True))
947 files.update(wctx.substate)
947 files.update(wctx.substate)
948 else:
948 else:
949 files.update(ctx.files())
949 files.update(ctx.files())
950 files.update(ctx.substate)
950 files.update(ctx.substate)
951
951
952 m = ctx.matchfileset(expr)
952 m = ctx.matchfileset(expr)
953 if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
953 if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
954 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
954 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
955 for f in sorted(files):
955 for f in sorted(files):
956 if not m(f):
956 if not m(f):
957 continue
957 continue
958 ui.write("%s\n" % f)
958 ui.write("%s\n" % f)
959
959
960 @command('debugformat',
960 @command('debugformat',
961 [] + cmdutil.formatteropts)
961 [] + cmdutil.formatteropts)
962 def debugformat(ui, repo, **opts):
962 def debugformat(ui, repo, **opts):
963 """display format information about the current repository
963 """display format information about the current repository
964
964
965 Use --verbose to get extra information about the current config value and
965 Use --verbose to get extra information about the current config value and
966 the Mercurial default."""
966 the Mercurial default."""
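# Usage sketch (hedged, output abridged): each format variant becomes one row,
#   $ hg debugformat --verbose
#   format-variant repo config default
#   ...
# where the 'config' and 'default' columns only appear with --verbose, as the
# fm.condwrite() calls below show.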
967 opts = pycompat.byteskwargs(opts)
967 opts = pycompat.byteskwargs(opts)
968 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
968 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
969 maxvariantlength = max(len('format-variant'), maxvariantlength)
969 maxvariantlength = max(len('format-variant'), maxvariantlength)
970
970
971 def makeformatname(name):
971 def makeformatname(name):
972 return '%s:' + (' ' * (maxvariantlength - len(name)))
972 return '%s:' + (' ' * (maxvariantlength - len(name)))
973
973
974 fm = ui.formatter('debugformat', opts)
974 fm = ui.formatter('debugformat', opts)
975 if fm.isplain():
975 if fm.isplain():
976 def formatvalue(value):
976 def formatvalue(value):
977 if util.safehasattr(value, 'startswith'):
977 if util.safehasattr(value, 'startswith'):
978 return value
978 return value
979 if value:
979 if value:
980 return 'yes'
980 return 'yes'
981 else:
981 else:
982 return 'no'
982 return 'no'
983 else:
983 else:
984 formatvalue = pycompat.identity
984 formatvalue = pycompat.identity
985
985
986 fm.plain('format-variant')
986 fm.plain('format-variant')
987 fm.plain(' ' * (maxvariantlength - len('format-variant')))
987 fm.plain(' ' * (maxvariantlength - len('format-variant')))
988 fm.plain(' repo')
988 fm.plain(' repo')
989 if ui.verbose:
989 if ui.verbose:
990 fm.plain(' config default')
990 fm.plain(' config default')
991 fm.plain('\n')
991 fm.plain('\n')
992 for fv in upgrade.allformatvariant:
992 for fv in upgrade.allformatvariant:
993 fm.startitem()
993 fm.startitem()
994 repovalue = fv.fromrepo(repo)
994 repovalue = fv.fromrepo(repo)
995 configvalue = fv.fromconfig(repo)
995 configvalue = fv.fromconfig(repo)
996
996
997 if repovalue != configvalue:
997 if repovalue != configvalue:
998 namelabel = 'formatvariant.name.mismatchconfig'
998 namelabel = 'formatvariant.name.mismatchconfig'
999 repolabel = 'formatvariant.repo.mismatchconfig'
999 repolabel = 'formatvariant.repo.mismatchconfig'
1000 elif repovalue != fv.default:
1000 elif repovalue != fv.default:
1001 namelabel = 'formatvariant.name.mismatchdefault'
1001 namelabel = 'formatvariant.name.mismatchdefault'
1002 repolabel = 'formatvariant.repo.mismatchdefault'
1002 repolabel = 'formatvariant.repo.mismatchdefault'
1003 else:
1003 else:
1004 namelabel = 'formatvariant.name.uptodate'
1004 namelabel = 'formatvariant.name.uptodate'
1005 repolabel = 'formatvariant.repo.uptodate'
1005 repolabel = 'formatvariant.repo.uptodate'
1006
1006
1007 fm.write('name', makeformatname(fv.name), fv.name,
1007 fm.write('name', makeformatname(fv.name), fv.name,
1008 label=namelabel)
1008 label=namelabel)
1009 fm.write('repo', ' %3s', formatvalue(repovalue),
1009 fm.write('repo', ' %3s', formatvalue(repovalue),
1010 label=repolabel)
1010 label=repolabel)
1011 if fv.default != configvalue:
1011 if fv.default != configvalue:
1012 configlabel = 'formatvariant.config.special'
1012 configlabel = 'formatvariant.config.special'
1013 else:
1013 else:
1014 configlabel = 'formatvariant.config.default'
1014 configlabel = 'formatvariant.config.default'
1015 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
1015 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
1016 label=configlabel)
1016 label=configlabel)
1017 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
1017 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
1018 label='formatvariant.default')
1018 label='formatvariant.default')
1019 fm.plain('\n')
1019 fm.plain('\n')
1020 fm.end()
1020 fm.end()
1021
1021
1022 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
1022 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
1023 def debugfsinfo(ui, path="."):
1023 def debugfsinfo(ui, path="."):
1024 """show information detected about current filesystem"""
1024 """show information detected about current filesystem"""
1025 ui.write(('path: %s\n') % path)
1025 ui.write(('path: %s\n') % path)
1026 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
1026 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
1027 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
1027 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
1028 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
1028 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
1029 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
1029 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
1030 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
1030 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
1031 casesensitive = '(unknown)'
1031 casesensitive = '(unknown)'
1032 try:
1032 try:
1033 with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
1033 with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
1034 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
1034 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
1035 except OSError:
1035 except OSError:
1036 pass
1036 pass
1037 ui.write(('case-sensitive: %s\n') % casesensitive)
1037 ui.write(('case-sensitive: %s\n') % casesensitive)
1038
1038
1039 @command('debuggetbundle',
1039 @command('debuggetbundle',
1040 [('H', 'head', [], _('id of head node'), _('ID')),
1040 [('H', 'head', [], _('id of head node'), _('ID')),
1041 ('C', 'common', [], _('id of common node'), _('ID')),
1041 ('C', 'common', [], _('id of common node'), _('ID')),
1042 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
1042 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
1043 _('REPO FILE [-H|-C ID]...'),
1043 _('REPO FILE [-H|-C ID]...'),
1044 norepo=True)
1044 norepo=True)
1045 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1045 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1046 """retrieves a bundle from a repo
1046 """retrieves a bundle from a repo
1047
1047
1048 Every ID must be a full-length hex node id string. Saves the bundle to the
1048 Every ID must be a full-length hex node id string. Saves the bundle to the
1049 given file.
1049 given file.
1050 """
1050 """
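# Usage sketch (hedged; the URL and node id are placeholders): fetch a bundle
# of everything reachable from a head, excluding what is already common:
#   $ hg debuggetbundle http://example.com/repo out.hg -H <full-hex-node> -t bundle2
# -t accepts none, bzip2 (the default), gzip or bundle2, per the btypes table
# below.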
1051 opts = pycompat.byteskwargs(opts)
1051 opts = pycompat.byteskwargs(opts)
1052 repo = hg.peer(ui, opts, repopath)
1052 repo = hg.peer(ui, opts, repopath)
1053 if not repo.capable('getbundle'):
1053 if not repo.capable('getbundle'):
1054 raise error.Abort("getbundle() not supported by target repository")
1054 raise error.Abort("getbundle() not supported by target repository")
1055 args = {}
1055 args = {}
1056 if common:
1056 if common:
1057 args[r'common'] = [bin(s) for s in common]
1057 args[r'common'] = [bin(s) for s in common]
1058 if head:
1058 if head:
1059 args[r'heads'] = [bin(s) for s in head]
1059 args[r'heads'] = [bin(s) for s in head]
1060 # TODO: get desired bundlecaps from command line.
1060 # TODO: get desired bundlecaps from command line.
1061 args[r'bundlecaps'] = None
1061 args[r'bundlecaps'] = None
1062 bundle = repo.getbundle('debug', **args)
1062 bundle = repo.getbundle('debug', **args)
1063
1063
1064 bundletype = opts.get('type', 'bzip2').lower()
1064 bundletype = opts.get('type', 'bzip2').lower()
1065 btypes = {'none': 'HG10UN',
1065 btypes = {'none': 'HG10UN',
1066 'bzip2': 'HG10BZ',
1066 'bzip2': 'HG10BZ',
1067 'gzip': 'HG10GZ',
1067 'gzip': 'HG10GZ',
1068 'bundle2': 'HG20'}
1068 'bundle2': 'HG20'}
1069 bundletype = btypes.get(bundletype)
1069 bundletype = btypes.get(bundletype)
1070 if bundletype not in bundle2.bundletypes:
1070 if bundletype not in bundle2.bundletypes:
1071 raise error.Abort(_('unknown bundle type specified with --type'))
1071 raise error.Abort(_('unknown bundle type specified with --type'))
1072 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1072 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1073
1073
1074 @command('debugignore', [], '[FILE]')
1074 @command('debugignore', [], '[FILE]')
1075 def debugignore(ui, repo, *files, **opts):
1075 def debugignore(ui, repo, *files, **opts):
1076 """display the combined ignore pattern and information about ignored files
1076 """display the combined ignore pattern and information about ignored files
1077
1077
1078 With no argument display the combined ignore pattern.
1078 With no argument display the combined ignore pattern.
1079
1079
1080 Given space separated file names, shows whether each given file is ignored
1080 Given space separated file names, shows whether each given file is ignored
1081 and, if so, shows the ignore rule (file and line number) that matched it.
1081 and, if so, shows the ignore rule (file and line number) that matched it.
1082 """
1082 """
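# Usage sketch (hedged; the path, rule and line number are illustrative):
#   $ hg debugignore build/output.o
#   build/output.o is ignored
#   (ignore rule in .hgignore, line 3: 'build')
# With no arguments the repr of the combined ignore matcher is printed
# instead.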
1083 ignore = repo.dirstate._ignore
1083 ignore = repo.dirstate._ignore
1084 if not files:
1084 if not files:
1085 # Show all the patterns
1085 # Show all the patterns
1086 ui.write("%s\n" % pycompat.byterepr(ignore))
1086 ui.write("%s\n" % pycompat.byterepr(ignore))
1087 else:
1087 else:
1088 m = scmutil.match(repo[None], pats=files)
1088 m = scmutil.match(repo[None], pats=files)
1089 for f in m.files():
1089 for f in m.files():
1090 nf = util.normpath(f)
1090 nf = util.normpath(f)
1091 ignored = None
1091 ignored = None
1092 ignoredata = None
1092 ignoredata = None
1093 if nf != '.':
1093 if nf != '.':
1094 if ignore(nf):
1094 if ignore(nf):
1095 ignored = nf
1095 ignored = nf
1096 ignoredata = repo.dirstate._ignorefileandline(nf)
1096 ignoredata = repo.dirstate._ignorefileandline(nf)
1097 else:
1097 else:
1098 for p in util.finddirs(nf):
1098 for p in util.finddirs(nf):
1099 if ignore(p):
1099 if ignore(p):
1100 ignored = p
1100 ignored = p
1101 ignoredata = repo.dirstate._ignorefileandline(p)
1101 ignoredata = repo.dirstate._ignorefileandline(p)
1102 break
1102 break
1103 if ignored:
1103 if ignored:
1104 if ignored == nf:
1104 if ignored == nf:
1105 ui.write(_("%s is ignored\n") % m.uipath(f))
1105 ui.write(_("%s is ignored\n") % m.uipath(f))
1106 else:
1106 else:
1107 ui.write(_("%s is ignored because of "
1107 ui.write(_("%s is ignored because of "
1108 "containing folder %s\n")
1108 "containing folder %s\n")
1109 % (m.uipath(f), ignored))
1109 % (m.uipath(f), ignored))
1110 ignorefile, lineno, line = ignoredata
1110 ignorefile, lineno, line = ignoredata
1111 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1111 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1112 % (ignorefile, lineno, line))
1112 % (ignorefile, lineno, line))
1113 else:
1113 else:
1114 ui.write(_("%s is not ignored\n") % m.uipath(f))
1114 ui.write(_("%s is not ignored\n") % m.uipath(f))
1115
1115
1116 @command('debugindex', cmdutil.debugrevlogopts + cmdutil.formatteropts,
1116 @command('debugindex', cmdutil.debugrevlogopts + cmdutil.formatteropts,
1117 _('-c|-m|FILE'))
1117 _('-c|-m|FILE'))
1118 def debugindex(ui, repo, file_=None, **opts):
1118 def debugindex(ui, repo, file_=None, **opts):
1119 """dump index data for a storage primitive"""
1119 """dump index data for a storage primitive"""
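# Output sketch (hedged): a header line 'rev linkrev nodeid p1 p2' followed by
# one row per revision, e.g.
#   $ hg debugindex -m
# Node ids are printed in short form unless --debug is set (see shortfn
# below); -c/-m select the changelog or manifest via cmdutil.debugrevlogopts.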
1120 opts = pycompat.byteskwargs(opts)
1120 opts = pycompat.byteskwargs(opts)
1121 store = cmdutil.openstorage(repo, 'debugindex', file_, opts)
1121 store = cmdutil.openstorage(repo, 'debugindex', file_, opts)
1122
1122
1123 if ui.debugflag:
1123 if ui.debugflag:
1124 shortfn = hex
1124 shortfn = hex
1125 else:
1125 else:
1126 shortfn = short
1126 shortfn = short
1127
1127
1128 idlen = 12
1128 idlen = 12
1129 for i in store:
1129 for i in store:
1130 idlen = len(shortfn(store.node(i)))
1130 idlen = len(shortfn(store.node(i)))
1131 break
1131 break
1132
1132
1133 fm = ui.formatter('debugindex', opts)
1133 fm = ui.formatter('debugindex', opts)
1134 fm.plain(b' rev linkrev %s %s p2\n' % (
1134 fm.plain(b' rev linkrev %s %s p2\n' % (
1135 b'nodeid'.ljust(idlen),
1135 b'nodeid'.ljust(idlen),
1136 b'p1'.ljust(idlen)))
1136 b'p1'.ljust(idlen)))
1137
1137
1138 for rev in store:
1138 for rev in store:
1139 node = store.node(rev)
1139 node = store.node(rev)
1140 parents = store.parents(node)
1140 parents = store.parents(node)
1141
1141
1142 fm.startitem()
1142 fm.startitem()
1143 fm.write(b'rev', b'%6d ', rev)
1143 fm.write(b'rev', b'%6d ', rev)
1144 fm.write(b'linkrev', '%7d ', store.linkrev(rev))
1144 fm.write(b'linkrev', '%7d ', store.linkrev(rev))
1145 fm.write(b'node', '%s ', shortfn(node))
1145 fm.write(b'node', '%s ', shortfn(node))
1146 fm.write(b'p1', '%s ', shortfn(parents[0]))
1146 fm.write(b'p1', '%s ', shortfn(parents[0]))
1147 fm.write(b'p2', '%s', shortfn(parents[1]))
1147 fm.write(b'p2', '%s', shortfn(parents[1]))
1148 fm.plain(b'\n')
1148 fm.plain(b'\n')
1149
1149
1150 fm.end()
1150 fm.end()
1151
1151
1152 @command('debugindexdot', cmdutil.debugrevlogopts,
1152 @command('debugindexdot', cmdutil.debugrevlogopts,
1153 _('-c|-m|FILE'), optionalrepo=True)
1153 _('-c|-m|FILE'), optionalrepo=True)
1154 def debugindexdot(ui, repo, file_=None, **opts):
1154 def debugindexdot(ui, repo, file_=None, **opts):
1155 """dump an index DAG as a graphviz dot file"""
1155 """dump an index DAG as a graphviz dot file"""
1156 opts = pycompat.byteskwargs(opts)
1156 opts = pycompat.byteskwargs(opts)
1157 r = cmdutil.openstorage(repo, 'debugindexdot', file_, opts)
1157 r = cmdutil.openstorage(repo, 'debugindexdot', file_, opts)
1158 ui.write(("digraph G {\n"))
1158 ui.write(("digraph G {\n"))
1159 for i in r:
1159 for i in r:
1160 node = r.node(i)
1160 node = r.node(i)
1161 pp = r.parents(node)
1161 pp = r.parents(node)
1162 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1162 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1163 if pp[1] != nullid:
1163 if pp[1] != nullid:
1164 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1164 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1165 ui.write("}\n")
1165 ui.write("}\n")
1166
1166
1167 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1167 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1168 def debuginstall(ui, **opts):
1168 def debuginstall(ui, **opts):
1169 '''test Mercurial installation
1169 '''test Mercurial installation
1170
1170
1171 Returns 0 on success.
1171 Returns 0 on success.
1172 '''
1172 '''
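# Usage sketch (hedged, output abridged): a healthy installation ends with
#   $ hg debuginstall
#   ...
#   no problems detected
# while every failed check increments 'problems', which is also the return
# value (and thus the exit code) of the command.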
1173 opts = pycompat.byteskwargs(opts)
1173 opts = pycompat.byteskwargs(opts)
1174
1174
1175 def writetemp(contents):
1175 def writetemp(contents):
1176 (fd, name) = pycompat.mkstemp(prefix="hg-debuginstall-")
1176 (fd, name) = pycompat.mkstemp(prefix="hg-debuginstall-")
1177 f = os.fdopen(fd, r"wb")
1177 f = os.fdopen(fd, r"wb")
1178 f.write(contents)
1178 f.write(contents)
1179 f.close()
1179 f.close()
1180 return name
1180 return name
1181
1181
1182 problems = 0
1182 problems = 0
1183
1183
1184 fm = ui.formatter('debuginstall', opts)
1184 fm = ui.formatter('debuginstall', opts)
1185 fm.startitem()
1185 fm.startitem()
1186
1186
1187 # encoding
1187 # encoding
1188 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1188 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1189 err = None
1189 err = None
1190 try:
1190 try:
1191 codecs.lookup(pycompat.sysstr(encoding.encoding))
1191 codecs.lookup(pycompat.sysstr(encoding.encoding))
1192 except LookupError as inst:
1192 except LookupError as inst:
1193 err = stringutil.forcebytestr(inst)
1193 err = stringutil.forcebytestr(inst)
1194 problems += 1
1194 problems += 1
1195 fm.condwrite(err, 'encodingerror', _(" %s\n"
1195 fm.condwrite(err, 'encodingerror', _(" %s\n"
1196 " (check that your locale is properly set)\n"), err)
1196 " (check that your locale is properly set)\n"), err)
1197
1197
1198 # Python
1198 # Python
1199 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1199 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1200 pycompat.sysexecutable)
1200 pycompat.sysexecutable)
1201 fm.write('pythonver', _("checking Python version (%s)\n"),
1201 fm.write('pythonver', _("checking Python version (%s)\n"),
1202 ("%d.%d.%d" % sys.version_info[:3]))
1202 ("%d.%d.%d" % sys.version_info[:3]))
1203 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1203 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1204 os.path.dirname(pycompat.fsencode(os.__file__)))
1204 os.path.dirname(pycompat.fsencode(os.__file__)))
1205
1205
1206 security = set(sslutil.supportedprotocols)
1206 security = set(sslutil.supportedprotocols)
1207 if sslutil.hassni:
1207 if sslutil.hassni:
1208 security.add('sni')
1208 security.add('sni')
1209
1209
1210 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1210 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1211 fm.formatlist(sorted(security), name='protocol',
1211 fm.formatlist(sorted(security), name='protocol',
1212 fmt='%s', sep=','))
1212 fmt='%s', sep=','))
1213
1213
1214 # These are warnings, not errors. So don't increment problem count. This
1214 # These are warnings, not errors. So don't increment problem count. This
1215 # may change in the future.
1215 # may change in the future.
1216 if 'tls1.2' not in security:
1216 if 'tls1.2' not in security:
1217 fm.plain(_(' TLS 1.2 not supported by Python install; '
1217 fm.plain(_(' TLS 1.2 not supported by Python install; '
1218 'network connections lack modern security\n'))
1218 'network connections lack modern security\n'))
1219 if 'sni' not in security:
1219 if 'sni' not in security:
1220 fm.plain(_(' SNI not supported by Python install; may have '
1220 fm.plain(_(' SNI not supported by Python install; may have '
1221 'connectivity issues with some servers\n'))
1221 'connectivity issues with some servers\n'))
1222
1222
1223 # TODO print CA cert info
1223 # TODO print CA cert info
1224
1224
1225 # hg version
1225 # hg version
1226 hgver = util.version()
1226 hgver = util.version()
1227 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1227 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1228 hgver.split('+')[0])
1228 hgver.split('+')[0])
1229 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1229 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1230 '+'.join(hgver.split('+')[1:]))
1230 '+'.join(hgver.split('+')[1:]))
1231
1231
1232 # compiled modules
1232 # compiled modules
1233 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1233 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1234 policy.policy)
1234 policy.policy)
1235 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1235 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1236 os.path.dirname(pycompat.fsencode(__file__)))
1236 os.path.dirname(pycompat.fsencode(__file__)))
1237
1237
1238 if policy.policy in ('c', 'allow'):
1238 if policy.policy in ('c', 'allow'):
1239 err = None
1239 err = None
1240 try:
1240 try:
1241 from .cext import (
1241 from .cext import (
1242 base85,
1242 base85,
1243 bdiff,
1243 bdiff,
1244 mpatch,
1244 mpatch,
1245 osutil,
1245 osutil,
1246 )
1246 )
1247 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1247 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1248 except Exception as inst:
1248 except Exception as inst:
1249 err = stringutil.forcebytestr(inst)
1249 err = stringutil.forcebytestr(inst)
1250 problems += 1
1250 problems += 1
1251 fm.condwrite(err, 'extensionserror', " %s\n", err)
1251 fm.condwrite(err, 'extensionserror', " %s\n", err)
1252
1252
1253 compengines = util.compengines._engines.values()
1253 compengines = util.compengines._engines.values()
1254 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1254 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1255 fm.formatlist(sorted(e.name() for e in compengines),
1255 fm.formatlist(sorted(e.name() for e in compengines),
1256 name='compengine', fmt='%s', sep=', '))
1256 name='compengine', fmt='%s', sep=', '))
1257 fm.write('compenginesavail', _('checking available compression engines '
1257 fm.write('compenginesavail', _('checking available compression engines '
1258 '(%s)\n'),
1258 '(%s)\n'),
1259 fm.formatlist(sorted(e.name() for e in compengines
1259 fm.formatlist(sorted(e.name() for e in compengines
1260 if e.available()),
1260 if e.available()),
1261 name='compengine', fmt='%s', sep=', '))
1261 name='compengine', fmt='%s', sep=', '))
1262 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1262 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1263 fm.write('compenginesserver', _('checking available compression engines '
1263 fm.write('compenginesserver', _('checking available compression engines '
1264 'for wire protocol (%s)\n'),
1264 'for wire protocol (%s)\n'),
1265 fm.formatlist([e.name() for e in wirecompengines
1265 fm.formatlist([e.name() for e in wirecompengines
1266 if e.wireprotosupport()],
1266 if e.wireprotosupport()],
1267 name='compengine', fmt='%s', sep=', '))
1267 name='compengine', fmt='%s', sep=', '))
1268 re2 = 'missing'
1268 re2 = 'missing'
1269 if util._re2:
1269 if util._re2:
1270 re2 = 'available'
1270 re2 = 'available'
1271 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1271 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1272 fm.data(re2=bool(util._re2))
1272 fm.data(re2=bool(util._re2))
1273
1273
1274 # templates
1274 # templates
1275 p = templater.templatepaths()
1275 p = templater.templatepaths()
1276 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1276 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1277 fm.condwrite(not p, '', _(" no template directories found\n"))
1277 fm.condwrite(not p, '', _(" no template directories found\n"))
1278 if p:
1278 if p:
1279 m = templater.templatepath("map-cmdline.default")
1279 m = templater.templatepath("map-cmdline.default")
1280 if m:
1280 if m:
1281 # template found, check if it is working
1281 # template found, check if it is working
1282 err = None
1282 err = None
1283 try:
1283 try:
1284 templater.templater.frommapfile(m)
1284 templater.templater.frommapfile(m)
1285 except Exception as inst:
1285 except Exception as inst:
1286 err = stringutil.forcebytestr(inst)
1286 err = stringutil.forcebytestr(inst)
1287 p = None
1287 p = None
1288 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1288 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1289 else:
1289 else:
1290 p = None
1290 p = None
1291 fm.condwrite(p, 'defaulttemplate',
1291 fm.condwrite(p, 'defaulttemplate',
1292 _("checking default template (%s)\n"), m)
1292 _("checking default template (%s)\n"), m)
1293 fm.condwrite(not m, 'defaulttemplatenotfound',
1293 fm.condwrite(not m, 'defaulttemplatenotfound',
1294 _(" template '%s' not found\n"), "default")
1294 _(" template '%s' not found\n"), "default")
1295 if not p:
1295 if not p:
1296 problems += 1
1296 problems += 1
1297 fm.condwrite(not p, '',
1297 fm.condwrite(not p, '',
1298 _(" (templates seem to have been installed incorrectly)\n"))
1298 _(" (templates seem to have been installed incorrectly)\n"))
1299
1299
1300 # editor
1300 # editor
1301 editor = ui.geteditor()
1301 editor = ui.geteditor()
1302 editor = util.expandpath(editor)
1302 editor = util.expandpath(editor)
1303 editorbin = procutil.shellsplit(editor)[0]
1303 editorbin = procutil.shellsplit(editor)[0]
1304 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1304 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1305 cmdpath = procutil.findexe(editorbin)
1305 cmdpath = procutil.findexe(editorbin)
1306 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1306 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1307 _(" No commit editor set and can't find %s in PATH\n"
1307 _(" No commit editor set and can't find %s in PATH\n"
1308 " (specify a commit editor in your configuration"
1308 " (specify a commit editor in your configuration"
1309 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1309 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1310 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1310 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1311 _(" Can't find editor '%s' in PATH\n"
1311 _(" Can't find editor '%s' in PATH\n"
1312 " (specify a commit editor in your configuration"
1312 " (specify a commit editor in your configuration"
1313 " file)\n"), not cmdpath and editorbin)
1313 " file)\n"), not cmdpath and editorbin)
1314 if not cmdpath and editor != 'vi':
1314 if not cmdpath and editor != 'vi':
1315 problems += 1
1315 problems += 1
1316
1316
1317 # check username
1317 # check username
1318 username = None
1318 username = None
1319 err = None
1319 err = None
1320 try:
1320 try:
1321 username = ui.username()
1321 username = ui.username()
1322 except error.Abort as e:
1322 except error.Abort as e:
1323 err = stringutil.forcebytestr(e)
1323 err = stringutil.forcebytestr(e)
1324 problems += 1
1324 problems += 1
1325
1325
1326 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1326 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1327 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1327 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1328 " (specify a username in your configuration file)\n"), err)
1328 " (specify a username in your configuration file)\n"), err)
1329
1329
1330 fm.condwrite(not problems, '',
1330 fm.condwrite(not problems, '',
1331 _("no problems detected\n"))
1331 _("no problems detected\n"))
1332 if not problems:
1332 if not problems:
1333 fm.data(problems=problems)
1333 fm.data(problems=problems)
1334 fm.condwrite(problems, 'problems',
1334 fm.condwrite(problems, 'problems',
1335 _("%d problems detected,"
1335 _("%d problems detected,"
1336 " please check your install!\n"), problems)
1336 " please check your install!\n"), problems)
1337 fm.end()
1337 fm.end()
1338
1338
1339 return problems
1339 return problems
1340
1340
1341 @command('debugknown', [], _('REPO ID...'), norepo=True)
1341 @command('debugknown', [], _('REPO ID...'), norepo=True)
1342 def debugknown(ui, repopath, *ids, **opts):
1342 def debugknown(ui, repopath, *ids, **opts):
1343 """test whether node ids are known to a repo
1343 """test whether node ids are known to a repo
1344
1344
1345 Every ID must be a full-length hex node id string. Returns a list of 0s
1345 Every ID must be a full-length hex node id string. Returns a list of 0s
1346 and 1s indicating unknown/known.
1346 and 1s indicating unknown/known.
1347 """
1347 """
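# Usage sketch (hedged; URL and ids are placeholders): querying two nodes of
# which only the first is known prints "10":
#   $ hg debugknown http://example.com/repo <known-full-hex> <unknown-full-hex>
#   10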
1348 opts = pycompat.byteskwargs(opts)
1348 opts = pycompat.byteskwargs(opts)
1349 repo = hg.peer(ui, opts, repopath)
1349 repo = hg.peer(ui, opts, repopath)
1350 if not repo.capable('known'):
1350 if not repo.capable('known'):
1351 raise error.Abort("known() not supported by target repository")
1351 raise error.Abort("known() not supported by target repository")
1352 flags = repo.known([bin(s) for s in ids])
1352 flags = repo.known([bin(s) for s in ids])
1353 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1353 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1354
1354
1355 @command('debuglabelcomplete', [], _('LABEL...'))
1355 @command('debuglabelcomplete', [], _('LABEL...'))
1356 def debuglabelcomplete(ui, repo, *args):
1356 def debuglabelcomplete(ui, repo, *args):
1357 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1357 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1358 debugnamecomplete(ui, repo, *args)
1358 debugnamecomplete(ui, repo, *args)
1359
1359
1360 @command('debuglocks',
1360 @command('debuglocks',
1361 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1361 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1362 ('W', 'force-wlock', None,
1362 ('W', 'force-wlock', None,
1363 _('free the working state lock (DANGEROUS)')),
1363 _('free the working state lock (DANGEROUS)')),
1364 ('s', 'set-lock', None, _('set the store lock until stopped')),
1364 ('s', 'set-lock', None, _('set the store lock until stopped')),
1365 ('S', 'set-wlock', None,
1365 ('S', 'set-wlock', None,
1366 _('set the working state lock until stopped'))],
1366 _('set the working state lock until stopped'))],
1367 _('[OPTION]...'))
1367 _('[OPTION]...'))
1368 def debuglocks(ui, repo, **opts):
1368 def debuglocks(ui, repo, **opts):
1369 """show or modify state of locks
1369 """show or modify state of locks
1370
1370
1371 By default, this command will show which locks are held. This
1371 By default, this command will show which locks are held. This
1372 includes the user and process holding the lock, the amount of time
1372 includes the user and process holding the lock, the amount of time
1373 the lock has been held, and the machine name where the process is
1373 the lock has been held, and the machine name where the process is
1374 running if it's not local.
1374 running if it's not local.
1375
1375
1376 Locks protect the integrity of Mercurial's data, so they should be
1376 Locks protect the integrity of Mercurial's data, so they should be
1377 treated with care. System crashes or other interruptions may cause
1377 treated with care. System crashes or other interruptions may cause
1378 locks to not be properly released, though Mercurial will usually
1378 locks to not be properly released, though Mercurial will usually
1379 detect and remove such stale locks automatically.
1379 detect and remove such stale locks automatically.
1380
1380
1381 However, detecting stale locks may not always be possible (for
1381 However, detecting stale locks may not always be possible (for
1382 instance, on a shared filesystem). Removing locks may also be
1382 instance, on a shared filesystem). Removing locks may also be
1383 blocked by filesystem permissions.
1383 blocked by filesystem permissions.
1384
1384
1385 Setting a lock will prevent other commands from changing the data.
1385 Setting a lock will prevent other commands from changing the data.
1386 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1386 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1387 The set locks are removed when the command exits.
1387 The set locks are removed when the command exits.
1388
1388
1389 Returns 0 if no locks are held.
1389 Returns 0 if no locks are held.
1390
1390
1391 """
1391 """
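# Usage sketch (hedged; user, pid and age are illustrative): with nothing held
# the report() helper below prints
#   $ hg debuglocks
#   lock:  free
#   wlock: free
# and the command exits 0, while a lingering store lock shows up as
#   lock:  user alice, process 1234 (12s)
# and the exit code becomes the number of held locks. The user or b'None'
# expression below renders a missing username as 'None'.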
1392
1392
1393 if opts.get(r'force_lock'):
1393 if opts.get(r'force_lock'):
1394 repo.svfs.unlink('lock')
1394 repo.svfs.unlink('lock')
1395 if opts.get(r'force_wlock'):
1395 if opts.get(r'force_wlock'):
1396 repo.vfs.unlink('wlock')
1396 repo.vfs.unlink('wlock')
1397 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1397 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1398 return 0
1398 return 0
1399
1399
1400 locks = []
1400 locks = []
1401 try:
1401 try:
1402 if opts.get(r'set_wlock'):
1402 if opts.get(r'set_wlock'):
1403 try:
1403 try:
1404 locks.append(repo.wlock(False))
1404 locks.append(repo.wlock(False))
1405 except error.LockHeld:
1405 except error.LockHeld:
1406 raise error.Abort(_('wlock is already held'))
1406 raise error.Abort(_('wlock is already held'))
1407 if opts.get(r'set_lock'):
1407 if opts.get(r'set_lock'):
1408 try:
1408 try:
1409 locks.append(repo.lock(False))
1409 locks.append(repo.lock(False))
1410 except error.LockHeld:
1410 except error.LockHeld:
1411 raise error.Abort(_('lock is already held'))
1411 raise error.Abort(_('lock is already held'))
1412 if len(locks):
1412 if len(locks):
1413 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1413 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1414 return 0
1414 return 0
1415 finally:
1415 finally:
1416 release(*locks)
1416 release(*locks)
1417
1417
1418 now = time.time()
1418 now = time.time()
1419 held = 0
1419 held = 0
1420
1420
1421 def report(vfs, name, method):
1421 def report(vfs, name, method):
1422 # this causes stale locks to get reaped for more accurate reporting
1422 # this causes stale locks to get reaped for more accurate reporting
1423 try:
1423 try:
1424 l = method(False)
1424 l = method(False)
1425 except error.LockHeld:
1425 except error.LockHeld:
1426 l = None
1426 l = None
1427
1427
1428 if l:
1428 if l:
1429 l.release()
1429 l.release()
1430 else:
1430 else:
1431 try:
1431 try:
1432 st = vfs.lstat(name)
1432 st = vfs.lstat(name)
1433 age = now - st[stat.ST_MTIME]
1433 age = now - st[stat.ST_MTIME]
1434 user = util.username(st.st_uid)
1434 user = util.username(st.st_uid)
1435 locker = vfs.readlock(name)
1435 locker = vfs.readlock(name)
1436 if ":" in locker:
1436 if ":" in locker:
1437 host, pid = locker.split(':')
1437 host, pid = locker.split(':')
1438 if host == socket.gethostname():
1438 if host == socket.gethostname():
1439 locker = 'user %s, process %s' % (user, pid)
1439 locker = 'user %s, process %s' % (user or b'None', pid)
1440 else:
1440 else:
1441 locker = 'user %s, process %s, host %s' \
1441 locker = 'user %s, process %s, host %s' \
1442 % (user, pid, host)
1442 % (user or b'None', pid, host)
1443 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1443 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1444 return 1
1444 return 1
1445 except OSError as e:
1445 except OSError as e:
1446 if e.errno != errno.ENOENT:
1446 if e.errno != errno.ENOENT:
1447 raise
1447 raise
1448
1448
1449 ui.write(("%-6s free\n") % (name + ":"))
1449 ui.write(("%-6s free\n") % (name + ":"))
1450 return 0
1450 return 0
1451
1451
1452 held += report(repo.svfs, "lock", repo.lock)
1452 held += report(repo.svfs, "lock", repo.lock)
1453 held += report(repo.vfs, "wlock", repo.wlock)
1453 held += report(repo.vfs, "wlock", repo.wlock)
1454
1454
1455 return held
1455 return held
1456
1456
1457 @command('debugmanifestfulltextcache', [
1457 @command('debugmanifestfulltextcache', [
1458 ('', 'clear', False, _('clear the cache')),
1458 ('', 'clear', False, _('clear the cache')),
1459 ('a', 'add', '', _('add the given manifest node to the cache'),
1459 ('a', 'add', '', _('add the given manifest node to the cache'),
1460 _('NODE'))
1460 _('NODE'))
1461 ], '')
1461 ], '')
1462 def debugmanifestfulltextcache(ui, repo, add=None, **opts):
1462 def debugmanifestfulltextcache(ui, repo, add=None, **opts):
1463 """show, clear or amend the contents of the manifest fulltext cache"""
1463 """show, clear or amend the contents of the manifest fulltext cache"""
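# Usage sketch (hedged): with no options the LRU cache is listed, most recent
# first, as 'id: <hex>, size <n>' lines plus the total and on-disk sizes;
# --clear empties the cache and --add NODE loads one manifest into it.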
1464 with repo.lock():
1464 with repo.lock():
1465 r = repo.manifestlog.getstorage(b'')
1465 r = repo.manifestlog.getstorage(b'')
1466 try:
1466 try:
1467 cache = r._fulltextcache
1467 cache = r._fulltextcache
1468 except AttributeError:
1468 except AttributeError:
1469 ui.warn(_(
1469 ui.warn(_(
1470 "Current revlog implementation doesn't appear to have a "
1470 "Current revlog implementation doesn't appear to have a "
1471 'manifest fulltext cache\n'))
1471 'manifest fulltext cache\n'))
1472 return
1472 return
1473
1473
1474 if opts.get(r'clear'):
1474 if opts.get(r'clear'):
1475 cache.clear()
1475 cache.clear()
1476
1476
1477 if add:
1477 if add:
1478 try:
1478 try:
1479 manifest = repo.manifestlog[r.lookup(add)]
1479 manifest = repo.manifestlog[r.lookup(add)]
1480 except error.LookupError as e:
1480 except error.LookupError as e:
1481 raise error.Abort(e, hint="Check your manifest node id")
1481 raise error.Abort(e, hint="Check your manifest node id")
1482 manifest.read() # stores revision in cache too
1482 manifest.read() # stores revision in cache too
1483
1483
1484 if not len(cache):
1484 if not len(cache):
1485 ui.write(_('Cache empty\n'))
1485 ui.write(_('Cache empty\n'))
1486 else:
1486 else:
1487 ui.write(
1487 ui.write(
1488 _('Cache contains %d manifest entries, in order of most to '
1488 _('Cache contains %d manifest entries, in order of most to '
1489 'least recent:\n') % (len(cache),))
1489 'least recent:\n') % (len(cache),))
1490 totalsize = 0
1490 totalsize = 0
1491 for nodeid in cache:
1491 for nodeid in cache:
1492 # Use cache.get to not update the LRU order
1492 # Use cache.get to not update the LRU order
1493 data = cache.get(nodeid)
1493 data = cache.get(nodeid)
1494 size = len(data)
1494 size = len(data)
1495 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1495 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1496 ui.write(_('id: %s, size %s\n') % (
1496 ui.write(_('id: %s, size %s\n') % (
1497 hex(nodeid), util.bytecount(size)))
1497 hex(nodeid), util.bytecount(size)))
1498 ondisk = cache._opener.stat('manifestfulltextcache').st_size
1498 ondisk = cache._opener.stat('manifestfulltextcache').st_size
1499 ui.write(
1499 ui.write(
1500 _('Total cache data size %s, on-disk %s\n') % (
1500 _('Total cache data size %s, on-disk %s\n') % (
1501 util.bytecount(totalsize), util.bytecount(ondisk))
1501 util.bytecount(totalsize), util.bytecount(ondisk))
1502 )
1502 )
1503
1503
1504 @command('debugmergestate', [], '')
1504 @command('debugmergestate', [], '')
1505 def debugmergestate(ui, repo, *args):
1505 def debugmergestate(ui, repo, *args):
1506 """print merge state
1506 """print merge state
1507
1507
1508 Use --verbose to print out information about whether v1 or v2 merge state
1508 Use --verbose to print out information about whether v1 or v2 merge state
1509 was chosen."""
1509 was chosen."""
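# Output sketch (hedged): with no merge in progress this prints
#   $ hg debugmergestate
#   no merge state found
# while during a conflicted merge the raw v1/v2 records ('L', 'O', 'm', file
# entries, labels, ...) are dumped in the order chosen by printrecords() below.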
1510 def _hashornull(h):
1510 def _hashornull(h):
1511 if h == nullhex:
1511 if h == nullhex:
1512 return 'null'
1512 return 'null'
1513 else:
1513 else:
1514 return h
1514 return h
1515
1515
1516 def printrecords(version):
1516 def printrecords(version):
1517 ui.write(('* version %d records\n') % version)
1517 ui.write(('* version %d records\n') % version)
1518 if version == 1:
1518 if version == 1:
1519 records = v1records
1519 records = v1records
1520 else:
1520 else:
1521 records = v2records
1521 records = v2records
1522
1522
1523 for rtype, record in records:
1523 for rtype, record in records:
1524 # pretty print some record types
1524 # pretty print some record types
1525 if rtype == 'L':
1525 if rtype == 'L':
1526 ui.write(('local: %s\n') % record)
1526 ui.write(('local: %s\n') % record)
1527 elif rtype == 'O':
1527 elif rtype == 'O':
1528 ui.write(('other: %s\n') % record)
1528 ui.write(('other: %s\n') % record)
1529 elif rtype == 'm':
1529 elif rtype == 'm':
1530 driver, mdstate = record.split('\0', 1)
1530 driver, mdstate = record.split('\0', 1)
1531 ui.write(('merge driver: %s (state "%s")\n')
1531 ui.write(('merge driver: %s (state "%s")\n')
1532 % (driver, mdstate))
1532 % (driver, mdstate))
1533 elif rtype in 'FDC':
1533 elif rtype in 'FDC':
1534 r = record.split('\0')
1534 r = record.split('\0')
1535 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1535 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1536 if version == 1:
1536 if version == 1:
1537 onode = 'not stored in v1 format'
1537 onode = 'not stored in v1 format'
1538 flags = r[7]
1538 flags = r[7]
1539 else:
1539 else:
1540 onode, flags = r[7:9]
1540 onode, flags = r[7:9]
1541 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1541 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1542 % (f, rtype, state, _hashornull(hash)))
1542 % (f, rtype, state, _hashornull(hash)))
1543 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1543 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1544 ui.write((' ancestor path: %s (node %s)\n')
1544 ui.write((' ancestor path: %s (node %s)\n')
1545 % (afile, _hashornull(anode)))
1545 % (afile, _hashornull(anode)))
1546 ui.write((' other path: %s (node %s)\n')
1546 ui.write((' other path: %s (node %s)\n')
1547 % (ofile, _hashornull(onode)))
1547 % (ofile, _hashornull(onode)))
1548 elif rtype == 'f':
1548 elif rtype == 'f':
1549 filename, rawextras = record.split('\0', 1)
1549 filename, rawextras = record.split('\0', 1)
1550 extras = rawextras.split('\0')
1550 extras = rawextras.split('\0')
1551 i = 0
1551 i = 0
1552 extrastrings = []
1552 extrastrings = []
1553 while i < len(extras):
1553 while i < len(extras):
1554 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1554 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1555 i += 2
1555 i += 2
1556
1556
1557 ui.write(('file extras: %s (%s)\n')
1557 ui.write(('file extras: %s (%s)\n')
1558 % (filename, ', '.join(extrastrings)))
1558 % (filename, ', '.join(extrastrings)))
1559 elif rtype == 'l':
1559 elif rtype == 'l':
1560 labels = record.split('\0', 2)
1560 labels = record.split('\0', 2)
1561 labels = [l for l in labels if len(l) > 0]
1561 labels = [l for l in labels if len(l) > 0]
1562 ui.write(('labels:\n'))
1562 ui.write(('labels:\n'))
1563 ui.write((' local: %s\n' % labels[0]))
1563 ui.write((' local: %s\n' % labels[0]))
1564 ui.write((' other: %s\n' % labels[1]))
1564 ui.write((' other: %s\n' % labels[1]))
1565 if len(labels) > 2:
1565 if len(labels) > 2:
1566 ui.write((' base: %s\n' % labels[2]))
1566 ui.write((' base: %s\n' % labels[2]))
1567 else:
1567 else:
1568 ui.write(('unrecognized entry: %s\t%s\n')
1568 ui.write(('unrecognized entry: %s\t%s\n')
1569 % (rtype, record.replace('\0', '\t')))
1569 % (rtype, record.replace('\0', '\t')))
1570
1570
1571 # Avoid mergestate.read() since it may raise an exception for unsupported
1571 # Avoid mergestate.read() since it may raise an exception for unsupported
1572 # merge state records. We shouldn't be doing this, but this is OK since this
1572 # merge state records. We shouldn't be doing this, but this is OK since this
1573 # command is pretty low-level.
1573 # command is pretty low-level.
1574 ms = mergemod.mergestate(repo)
1574 ms = mergemod.mergestate(repo)
1575
1575
1576 # sort so that reasonable information is on top
1576 # sort so that reasonable information is on top
1577 v1records = ms._readrecordsv1()
1577 v1records = ms._readrecordsv1()
1578 v2records = ms._readrecordsv2()
1578 v2records = ms._readrecordsv2()
1579 order = 'LOml'
1579 order = 'LOml'
1580 def key(r):
1580 def key(r):
1581 idx = order.find(r[0])
1581 idx = order.find(r[0])
1582 if idx == -1:
1582 if idx == -1:
1583 return (1, r[1])
1583 return (1, r[1])
1584 else:
1584 else:
1585 return (0, idx)
1585 return (0, idx)
1586 v1records.sort(key=key)
1586 v1records.sort(key=key)
1587 v2records.sort(key=key)
1587 v2records.sort(key=key)
1588
1588
1589 if not v1records and not v2records:
1589 if not v1records and not v2records:
1590 ui.write(('no merge state found\n'))
1590 ui.write(('no merge state found\n'))
1591 elif not v2records:
1591 elif not v2records:
1592 ui.note(('no version 2 merge state\n'))
1592 ui.note(('no version 2 merge state\n'))
1593 printrecords(1)
1593 printrecords(1)
1594 elif ms._v1v2match(v1records, v2records):
1594 elif ms._v1v2match(v1records, v2records):
1595 ui.note(('v1 and v2 states match: using v2\n'))
1595 ui.note(('v1 and v2 states match: using v2\n'))
1596 printrecords(2)
1596 printrecords(2)
1597 else:
1597 else:
1598 ui.note(('v1 and v2 states mismatch: using v1\n'))
1598 ui.note(('v1 and v2 states mismatch: using v1\n'))
1599 printrecords(1)
1599 printrecords(1)
1600 if ui.verbose:
1600 if ui.verbose:
1601 printrecords(2)
1601 printrecords(2)
1602
1602
1603 @command('debugnamecomplete', [], _('NAME...'))
1603 @command('debugnamecomplete', [], _('NAME...'))
1604 def debugnamecomplete(ui, repo, *args):
1604 def debugnamecomplete(ui, repo, *args):
1605 '''complete "names" - tags, open branch names, bookmark names'''
1605 '''complete "names" - tags, open branch names, bookmark names'''
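# Usage sketch (hedged; assumes the usual 'default' branch is open): names are
# completed by prefix match over tags, bookmarks and open branch names, so
#   $ hg debugnamecomplete def
#   default
# lists every such name starting with "def".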
1606
1606
1607 names = set()
1607 names = set()
1608 # since we previously only listed open branches, we will handle that
1608 # since we previously only listed open branches, we will handle that
1609 # specially (after this for loop)
1609 # specially (after this for loop)
1610 for name, ns in repo.names.iteritems():
1610 for name, ns in repo.names.iteritems():
1611 if name != 'branches':
1611 if name != 'branches':
1612 names.update(ns.listnames(repo))
1612 names.update(ns.listnames(repo))
1613 names.update(tag for (tag, heads, tip, closed)
1613 names.update(tag for (tag, heads, tip, closed)
1614 in repo.branchmap().iterbranches() if not closed)
1614 in repo.branchmap().iterbranches() if not closed)
1615 completions = set()
1615 completions = set()
1616 if not args:
1616 if not args:
1617 args = ['']
1617 args = ['']
1618 for a in args:
1618 for a in args:
1619 completions.update(n for n in names if n.startswith(a))
1619 completions.update(n for n in names if n.startswith(a))
1620 ui.write('\n'.join(sorted(completions)))
1620 ui.write('\n'.join(sorted(completions)))
1621 ui.write('\n')
1621 ui.write('\n')
1622
1622
1623 @command('debugobsolete',
1623 @command('debugobsolete',
1624 [('', 'flags', 0, _('markers flag')),
1624 [('', 'flags', 0, _('markers flag')),
1625 ('', 'record-parents', False,
1625 ('', 'record-parents', False,
1626 _('record parent information for the precursor')),
1626 _('record parent information for the precursor')),
1627 ('r', 'rev', [], _('display markers relevant to REV')),
1627 ('r', 'rev', [], _('display markers relevant to REV')),
1628 ('', 'exclusive', False, _('restrict display to markers only '
1628 ('', 'exclusive', False, _('restrict display to markers only '
1629 'relevant to REV')),
1629 'relevant to REV')),
1630 ('', 'index', False, _('display index of the marker')),
1630 ('', 'index', False, _('display index of the marker')),
1631 ('', 'delete', [], _('delete markers specified by indices')),
1631 ('', 'delete', [], _('delete markers specified by indices')),
1632 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1632 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1633 _('[OBSOLETED [REPLACEMENT ...]]'))
1633 _('[OBSOLETED [REPLACEMENT ...]]'))
1634 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1634 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1635 """create an arbitrary obsolete marker
1635 """create an arbitrary obsolete marker
1636
1636
1637 With no arguments, displays the list of obsolescence markers."""
1637 With no arguments, displays the list of obsolescence markers."""
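# Usage sketch (hedged; both ids are placeholders and must be full 40-digit
# hex node ids, as parsenodeid() below enforces):
#   $ hg debugobsolete <old-node> <new-node>
# records a marker obsoleting the first changeset in favour of the second,
# while
#   $ hg debugobsolete --rev . --index
# lists only the markers relevant to the working directory parent together
# with their indices (usable later with --delete).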
1638
1638
1639 opts = pycompat.byteskwargs(opts)
1639 opts = pycompat.byteskwargs(opts)
1640
1640
1641 def parsenodeid(s):
1641 def parsenodeid(s):
1642 try:
1642 try:
1643 # We do not use revsingle/revrange functions here to accept
1643 # We do not use revsingle/revrange functions here to accept
1644 # arbitrary node identifiers, possibly not present in the
1644 # arbitrary node identifiers, possibly not present in the
1645 # local repository.
1645 # local repository.
1646 n = bin(s)
1646 n = bin(s)
1647 if len(n) != len(nullid):
1647 if len(n) != len(nullid):
1648 raise TypeError()
1648 raise TypeError()
1649 return n
1649 return n
1650 except TypeError:
1650 except TypeError:
1651 raise error.Abort('changeset references must be full hexadecimal '
1651 raise error.Abort('changeset references must be full hexadecimal '
1652 'node identifiers')
1652 'node identifiers')
1653
1653
1654 if opts.get('delete'):
1654 if opts.get('delete'):
1655 indices = []
1655 indices = []
1656 for v in opts.get('delete'):
1656 for v in opts.get('delete'):
1657 try:
1657 try:
1658 indices.append(int(v))
1658 indices.append(int(v))
1659 except ValueError:
1659 except ValueError:
1660 raise error.Abort(_('invalid index value: %r') % v,
1660 raise error.Abort(_('invalid index value: %r') % v,
1661 hint=_('use integers for indices'))
1661 hint=_('use integers for indices'))
1662
1662
1663 if repo.currenttransaction():
1663 if repo.currenttransaction():
1664 raise error.Abort(_('cannot delete obsmarkers in the middle '
1664 raise error.Abort(_('cannot delete obsmarkers in the middle '
1665 'of a transaction.'))
1665 'of a transaction.'))
1666
1666
1667 with repo.lock():
1667 with repo.lock():
1668 n = repair.deleteobsmarkers(repo.obsstore, indices)
1668 n = repair.deleteobsmarkers(repo.obsstore, indices)
1669 ui.write(_('deleted %i obsolescence markers\n') % n)
1669 ui.write(_('deleted %i obsolescence markers\n') % n)
1670
1670
1671 return
1671 return
1672
1672
1673 if precursor is not None:
1673 if precursor is not None:
1674 if opts['rev']:
1674 if opts['rev']:
1675 raise error.Abort('cannot select revision when creating marker')
1675 raise error.Abort('cannot select revision when creating marker')
1676 metadata = {}
1676 metadata = {}
1677 metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
1677 metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
1678 succs = tuple(parsenodeid(succ) for succ in successors)
1678 succs = tuple(parsenodeid(succ) for succ in successors)
1679 l = repo.lock()
1679 l = repo.lock()
1680 try:
1680 try:
1681 tr = repo.transaction('debugobsolete')
1681 tr = repo.transaction('debugobsolete')
1682 try:
1682 try:
1683 date = opts.get('date')
1683 date = opts.get('date')
1684 if date:
1684 if date:
1685 date = dateutil.parsedate(date)
1685 date = dateutil.parsedate(date)
1686 else:
1686 else:
1687 date = None
1687 date = None
1688 prec = parsenodeid(precursor)
1688 prec = parsenodeid(precursor)
1689 parents = None
1689 parents = None
1690 if opts['record_parents']:
1690 if opts['record_parents']:
1691 if prec not in repo.unfiltered():
1691 if prec not in repo.unfiltered():
1692 raise error.Abort('cannot use --record-parents on '
1692 raise error.Abort('cannot use --record-parents on '
1693 'unknown changesets')
1693 'unknown changesets')
1694 parents = repo.unfiltered()[prec].parents()
1694 parents = repo.unfiltered()[prec].parents()
1695 parents = tuple(p.node() for p in parents)
1695 parents = tuple(p.node() for p in parents)
1696 repo.obsstore.create(tr, prec, succs, opts['flags'],
1696 repo.obsstore.create(tr, prec, succs, opts['flags'],
1697 parents=parents, date=date,
1697 parents=parents, date=date,
1698 metadata=metadata, ui=ui)
1698 metadata=metadata, ui=ui)
1699 tr.close()
1699 tr.close()
1700 except ValueError as exc:
1700 except ValueError as exc:
1701 raise error.Abort(_('bad obsmarker input: %s') %
1701 raise error.Abort(_('bad obsmarker input: %s') %
1702 pycompat.bytestr(exc))
1702 pycompat.bytestr(exc))
1703 finally:
1703 finally:
1704 tr.release()
1704 tr.release()
1705 finally:
1705 finally:
1706 l.release()
1706 l.release()
1707 else:
1707 else:
1708 if opts['rev']:
1708 if opts['rev']:
1709 revs = scmutil.revrange(repo, opts['rev'])
1709 revs = scmutil.revrange(repo, opts['rev'])
1710 nodes = [repo[r].node() for r in revs]
1710 nodes = [repo[r].node() for r in revs]
1711 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1711 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1712 exclusive=opts['exclusive']))
1712 exclusive=opts['exclusive']))
1713 markers.sort(key=lambda x: x._data)
1713 markers.sort(key=lambda x: x._data)
1714 else:
1714 else:
1715 markers = obsutil.getmarkers(repo)
1715 markers = obsutil.getmarkers(repo)
1716
1716
1717 markerstoiter = markers
1717 markerstoiter = markers
1718 isrelevant = lambda m: True
1718 isrelevant = lambda m: True
1719 if opts.get('rev') and opts.get('index'):
1719 if opts.get('rev') and opts.get('index'):
1720 markerstoiter = obsutil.getmarkers(repo)
1720 markerstoiter = obsutil.getmarkers(repo)
1721 markerset = set(markers)
1721 markerset = set(markers)
1722 isrelevant = lambda m: m in markerset
1722 isrelevant = lambda m: m in markerset
1723
1723
1724 fm = ui.formatter('debugobsolete', opts)
1724 fm = ui.formatter('debugobsolete', opts)
1725 for i, m in enumerate(markerstoiter):
1725 for i, m in enumerate(markerstoiter):
1726 if not isrelevant(m):
1726 if not isrelevant(m):
1727 # marker can be irrelevant when we're iterating over a set
1727 # marker can be irrelevant when we're iterating over a set
1728 # of markers (markerstoiter) which is bigger than the set
1728 # of markers (markerstoiter) which is bigger than the set
1729 # of markers we want to display (markers)
1729 # of markers we want to display (markers)
1730 # this can happen if both --index and --rev options are
1730 # this can happen if both --index and --rev options are
1731 # provided and thus we need to iterate over all of the markers
1731 # provided and thus we need to iterate over all of the markers
1732 # to get the correct indices, but only display the ones that
1732 # to get the correct indices, but only display the ones that
1733 # are relevant to --rev value
1733 # are relevant to --rev value
1734 continue
1734 continue
1735 fm.startitem()
1735 fm.startitem()
1736 ind = i if opts.get('index') else None
1736 ind = i if opts.get('index') else None
1737 cmdutil.showmarker(fm, m, index=ind)
1737 cmdutil.showmarker(fm, m, index=ind)
1738 fm.end()
1738 fm.end()
1739
1739
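# Editor's note: a minimal, self-contained sketch of the --rev/--index
# interaction handled above. Indices are positions in the *full* marker
# list, so the command iterates everything and merely filters the output.
# The helper and values below are hypothetical, not part of Mercurial.
def _indexedsubset(allmarkers, wanted):
    """Return (index, marker) pairs for ``wanted``, keeping global indices."""
    return [(i, m) for i, m in enumerate(allmarkers) if m in wanted]

# _indexedsubset(['m0', 'm1', 'm2', 'm3'], {'m1', 'm3'})
# -> [(1, 'm1'), (3, 'm3')]
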
@command('debugpathcomplete',
         [('f', 'full', None, _('complete an entire path')),
          ('n', 'normal', None, _('show only normal files')),
          ('a', 'added', None, _('show only added files')),
          ('r', 'removed', None, _('show only removed files'))],
         _('FILESPEC...'))
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += '/'
        spec = spec[len(rootdir):]
        fixpaths = pycompat.ossep != '/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, '/')
        speclen = len(spec)
        fullpaths = opts[r'full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.iteritems():
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace('/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    acceptable = ''
    if opts[r'normal']:
        acceptable += 'nm'
    if opts[r'added']:
        acceptable += 'a'
    if opts[r'removed']:
        acceptable += 'r'
    cwd = repo.getcwd()
    if not specs:
        specs = ['.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or 'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write('\n')

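# Editor's note: a standalone sketch of the next-segment completion rule
# used by complete() above (hypothetical inputs; not a Mercurial API).
def _nextsegment(tracked, spec):
    """Return whole files matching ``spec`` plus the next directory segment
    for deeper paths, mirroring the behaviour without --full."""
    files, dirs = set(), set()
    speclen = len(spec)
    for f in tracked:
        if not f.startswith(spec):
            continue
        cut = f.find('/', speclen)
        if cut >= 0:
            dirs.add(f[:cut])        # stop at the next path segment
        else:
            files.add(f)             # file directly under the prefix
    return files | dirs

# _nextsegment({'foo/bar.txt', 'foo/baz.txt', 'food'}, 'fo')
# -> {'foo', 'food'}
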
@command('debugpeer', [], _('PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    overrides = {
        ('devel', 'debug.peer-request'): True,
    }

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

        local = peer.local() is not None
        canpush = peer.canpush()

        ui.write(_('url: %s\n') % peer.url())
        ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
        ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))

@command('debugpickmergetool',
         [('r', 'rev', '', _('check for files in this revision'), _('REV')),
          ('', 'changedelete', None, _('emulate merging change and delete')),
         ] + cmdutil.walkopts + cmdutil.mergetoolopts,
         _('[PATTERN]...'),
         inferrepo=True)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for the specified file

    As described in :hg:`help merge-tools`, Mercurial examines the
    configurations below in this order to decide which merge tool is
    chosen for the specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reasons only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out the examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actually updating to it.

    With --debug, this command also shows warning messages while matching
    against ``merge-patterns`` and so on. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases the amount of output per file according
    to the configuration in hgrc.

    With -v/--verbose, this command shows the configurations below
    first (only those that are specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If the merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such cases, the information above
    is useful to know why a merge tool was chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts['tool']:
        overrides[('ui', 'forcemerge')] = opts['tool']
        ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))

    with ui.configoverride(overrides, 'debugmergepatterns'):
        hgmerge = encoding.environ.get("HGMERGE")
        if hgmerge is not None:
            ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config("ui", "merge")
        if uimerge:
            ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get('rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts['changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                if not ui.debugflag:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(repo, ui, path,
                                                     fctx.isbinary(),
                                                     'l' in fctx.flags(),
                                                     changedelete)
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(('%s = %s\n') % (path, tool))

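# Editor's note: a hedged sketch of the precedence order documented above.
# The real resolution lives in filemerge._picktool(); the helper and values
# here are hypothetical and only illustrate "first configured source wins".
def _firstconfigured(candidates):
    """Return (source, value) for the first candidate whose value is set,
    or the built-in fallback described in step 7 above."""
    for source, value in candidates:
        if value:
            return source, value
    return 'fallback', ':merge'

# _firstconfigured([('--tool', None), ('HGMERGE', None), ('ui.merge', 'vimdiff')])
# -> ('ui.merge', 'vimdiff')
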
@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    target = hg.peer(ui, {}, repopath)
    if keyinfo:
        key, old, new = keyinfo
        with target.commandexecutor() as e:
            r = e.callcommand('pushkey', {
                'namespace': namespace,
                'key': key,
                'old': old,
                'new': new,
            }).result()

        ui.status(pycompat.bytestr(r) + '\n')
        return not r
    else:
        for k, v in sorted(target.listkeys(namespace).iteritems()):
            ui.write("%s\t%s\n" % (stringutil.escapestr(k),
                                   stringutil.escapestr(v)))

@command('debugpvec', [], _('A B'))
def debugpvec(ui, repo, a, b=None):
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = "="
    elif pa > pb:
        rel = ">"
    elif pa < pb:
        rel = "<"
    elif pa | pb:
        rel = "|"
    ui.write(_("a: %s\n") % pa)
    ui.write(_("b: %s\n") % pb)
    ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
             (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
              pa.distance(pb), rel))

@command('debugrebuilddirstate|debugrebuildstate',
         [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
          ('', 'minimal', None, _('only rebuild files that are inconsistent with '
                                  'the working copy parent')),
         ],
         _('[-r REV]'))
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look for the given revision

    If no revision is specified, the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get(r'minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            manifestonly = manifestfiles - dirstatefiles
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)

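# Editor's note: the --minimal selection above, restated with plain sets
# (hypothetical file names; not a Mercurial API).
def _minimalchanges(manifestfiles, dirstatefiles, added):
    """Files that exist only in the manifest, plus dirstate files that are
    neither in the manifest nor marked as added."""
    manifestonly = manifestfiles - dirstatefiles
    dsnotadded = (dirstatefiles - manifestfiles) - added
    return manifestonly | dsnotadded

# _minimalchanges({'a', 'b'}, {'b', 'c', 'd'}, {'d'}) -> {'a', 'c'}
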
@command('debugrebuildfncache', [], '')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    repair.rebuildfncache(ui, repo)

@command('debugrename',
         [('r', 'rev', '', _('revision to debug'), _('REV'))],
         _('[-r REV] FILE'))
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'))
    m = scmutil.match(ctx, (file1,) + pats, opts)
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        o = fctx.filelog().renamed(fctx.filenode())
        rel = m.rel(abs)
        if o:
            ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
        else:
            ui.write(_("%s not renamed\n") % rel)

@command('debugrevlog', cmdutil.debugrevlogopts +
         [('d', 'dump', False, _('dump index data'))],
         _('-c|-m|FILE'),
         optionalrepo=True)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)

    if opts.get("dump"):
        numrevs = len(r)
        ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
                  " rawsize totalsize compression heads chainlen\n"))
        ts = 0
        heads = set()

        for rev in pycompat.xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                     "%11d %5d %8d\n" %
                     (rev, p1, p2, r.start(rev), r.end(rev),
                      r.start(dbase), r.start(cbase),
                      r.start(p1), r.start(p2),
                      rs, ts, compression, len(heads), clen))
        return 0

    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append('inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append('generaldelta')
    if not flags:
        flags = ['(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks the ways the "delta" are built
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about the delta chain of each rev
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in pycompat.xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, '_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            chunktype = bytes(segment[0:1])
        else:
            chunktype = 'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if 0 < numsemi:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    basedfmtstr = '%%%dd\n'
    basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        return basedfmtstr % len(str(max))
    def pcfmtstr(max, padding=0):
        return basepcfmtstr % (len(str(max)), ' ' * padding)

    def pcfmt(value, total):
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.write(('format : %d\n') % format)
    ui.write(('flags : %s\n') % ', '.join(flags))

    ui.write('\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
    ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' empty : ') + fmt % pcfmt(numempty, numrevs))
    ui.write((' text : ')
             + fmt % pcfmt(numemptytext, numemptytext + numemptydelta))
    ui.write((' delta : ')
             + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta))
    ui.write((' snapshot : ') + fmt % pcfmt(numfull + numsemi, numrevs))
    for depth in sorted(numsnapdepth):
        ui.write((' lvl-%-3d : ' % depth)
                 + fmt % pcfmt(numsnapdepth[depth], numrevs))
    ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
    ui.write(('revision size : ') + fmt2 % totalsize)
    ui.write((' snapshot : ')
             + fmt % pcfmt(fulltotal + semitotal, totalsize))
    for depth in sorted(numsnapdepth):
        ui.write((' lvl-%-3d : ' % depth)
                 + fmt % pcfmt(snaptotal[depth], totalsize))
    ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        if chunktype == 'empty':
            return ' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return ' 0x%s : ' % hex(chunktype)

    ui.write('\n')
    ui.write(('chunks : ') + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.write(('chunks size : ') + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write('\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.write(('avg chain length : ') + fmt % avgchainlen)
    ui.write(('max chain length : ') + fmt % maxchainlen)
    ui.write(('max chain reach : ') + fmt % maxchainspan)
    ui.write(('compression ratio : ') + fmt % compratio)

    if format > 0:
        ui.write('\n')
        ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
                 % tuple(datasize))
        ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
                 % tuple(fullsize))
        ui.write(('inter-snapshot size (min/max/avg) : %d / %d / %d\n')
                 % tuple(semisize))
        for depth in sorted(snapsizedepth):
            if depth == 0:
                continue
            ui.write((' level-%-3d (min/max/avg) : %d / %d / %d\n')
                     % ((depth,) + tuple(snapsizedepth[depth])))
        ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
                 % tuple(deltasize))

    if numdeltas > 0:
        ui.write('\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
        if numprev > 0:
            ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
                                                            numprev))
            ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
                                                            numprev))
            ui.write((' other : ') + fmt2 % pcfmt(numoprev,
                                                  numprev))
        if gdelta:
            ui.write(('deltas against p1 : ')
                     + fmt % pcfmt(nump1, numdeltas))
            ui.write(('deltas against p2 : ')
                     + fmt % pcfmt(nump2, numdeltas))
        ui.write(('deltas against other : ') + fmt % pcfmt(numother,
                                                           numdeltas))

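# Editor's note: a minimal sketch of the [min, max, total] accumulator
# pattern used by addsize() above (illustrative sizes only).
def _sizestats(sizes):
    acc = [None, 0, 0]
    for size in sizes:
        if acc[0] is None or size < acc[0]:
            acc[0] = size
        if size > acc[1]:
            acc[1] = size
        acc[2] += size
    return acc

# _sizestats([12, 3, 40]) -> [3, 40, 55]; debugrevlog later divides the
# running total by the relevant count to report the averages shown above.
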
@command('debugrevlogindex', cmdutil.debugrevlogopts +
         [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
         _('[-f FORMAT] -c|-m|FILE'),
         optionalrepo=True)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugrevlogindex', file_, opts)
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise error.Abort(_("unknown format %d") % format)

    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    if format == 0:
        if ui.verbose:
            ui.write((" rev offset length linkrev"
                      " %s %s p2\n") % ("nodeid".ljust(idlen),
                                        "p1".ljust(idlen)))
        else:
            ui.write((" rev linkrev %s %s p2\n") % (
                "nodeid".ljust(idlen), "p1".ljust(idlen)))
    elif format == 1:
        if ui.verbose:
            ui.write((" rev flag offset length size link p1"
                      " p2 %s\n") % "nodeid".rjust(idlen))
        else:
            ui.write((" rev flag size link p1 p2 %s\n") %
                     "nodeid".rjust(idlen))

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                pp = [nullid, nullid]
            if ui.verbose:
                ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
                    i, r.start(i), r.length(i), r.linkrev(i),
                    shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
            else:
                ui.write("% 6d % 7d %s %s %s\n" % (
                    i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
                    shortfn(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                    r.linkrev(i), pr[0], pr[1], shortfn(node)))
            else:
                ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
                    shortfn(node)))

@command('debugrevspec',
         [('', 'optimize', None,
           _('print parsed tree after optimizing (DEPRECATED)')),
          ('', 'show-revs', True, _('print list of result revisions (default)')),
          ('s', 'show-set', None, _('print internal representation of result set')),
          ('p', 'show-stage', [],
           _('print parsed tree at the given stage'), _('NAME')),
          ('', 'no-optimized', False, _('evaluate tree without optimization')),
          ('', 'verify-optimized', False, _('verify optimized result')),
         ],
         ('REVSPEC'))
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems('revsetalias')
    stages = [
        ('parsed', lambda tree: tree),
        ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
                                                           ui.warn)),
        ('concatenated', revsetlang.foldconcat),
        ('analyzed', revsetlang.analyze),
        ('optimized', revsetlang.optimize),
    ]
    if opts['no_optimized']:
        stages = stages[:-1]
    if opts['verify_optimized'] and opts['no_optimized']:
        raise error.Abort(_('cannot use --verify-optimized with '
                            '--no-optimized'))
    stagenames = set(n for n, f in stages)

    showalways = set()
    showchanged = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
        showchanged.update(['expanded', 'concatenated'])
        if opts['optimize']:
            showalways.add('optimized')
    if opts['show_stage'] and opts['optimize']:
        raise error.Abort(_('cannot use --optimize with --show-stage'))
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(revsetlang.prettyformat(tree), "\n")
            printedtree = tree

    if opts['verify_optimized']:
        arevs = revset.makematcher(treebystage['analyzed'])(repo)
        brevs = revset.makematcher(treebystage['optimized'])(repo)
        if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
            ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
            ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        ui.write(('--- analyzed\n'), label='diff.file_a')
        ui.write(('+++ optimized\n'), label='diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write('-%s\n' % c, label='diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write('+%s\n' % c, label='diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(' %s\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
        ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
    if not opts['show_revs']:
        return
    for c in revs:
        ui.write("%d\n" % c)

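# Editor's note: the stage loop above threads one tree through successive
# transforms while remembering every intermediate result; a stand-in with
# toy transforms (hypothetical values, not revsetlang functions).
def _runstages(tree, stages):
    bystage = {}
    for name, transform in stages:
        bystage[name] = tree = transform(tree)
    return bystage, tree

# _runstages(('symbol', 'tip'),
#            [('parsed', lambda t: t),
#             ('optimized', lambda t: t + ('optimized',))])
# -> ({'parsed': ('symbol', 'tip'),
#      'optimized': ('symbol', 'tip', 'optimized')},
#     ('symbol', 'tip', 'optimized'))
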
@command('debugserve', [
    ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
    ('', 'logiofd', '', _('file descriptor to log server I/O to')),
    ('', 'logiofile', '', _('file to log server I/O to')),
], '')
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts['sshstdio']:
        raise error.Abort(_('only --sshstdio is currently supported'))

    logfh = None

    if opts['logiofd'] and opts['logiofile']:
        raise error.Abort(_('cannot use both --logiofd and --logiofile'))

    if opts['logiofd']:
        # Line buffered because output is line based.
        try:
            logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
    elif opts['logiofile']:
        logfh = open(opts['logiofile'], 'ab', 1)

    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()

@command('debugsetparents', [], _('REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """

    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, 'null').node()

    with repo.wlock():
        repo.setparents(node1, node2)

2536 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2536 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2537 def debugssl(ui, repo, source=None, **opts):
2537 def debugssl(ui, repo, source=None, **opts):
2538 '''test a secure connection to a server
2538 '''test a secure connection to a server
2539
2539
2540 This builds the certificate chain for the server on Windows, installing the
2540 This builds the certificate chain for the server on Windows, installing the
2541 missing intermediates and trusted root via Windows Update if necessary. It
2541 missing intermediates and trusted root via Windows Update if necessary. It
2542 does nothing on other platforms.
2542 does nothing on other platforms.
2543
2543
2544 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2544 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2545 that server is used. See :hg:`help urls` for more information.
2545 that server is used. See :hg:`help urls` for more information.
2546
2546
2547 If the update succeeds, retry the original operation. Otherwise, the cause
2547 If the update succeeds, retry the original operation. Otherwise, the cause
2548 of the SSL error is likely another issue.
2548 of the SSL error is likely another issue.
2549 '''
2549 '''
2550 if not pycompat.iswindows:
2550 if not pycompat.iswindows:
2551 raise error.Abort(_('certificate chain building is only possible on '
2551 raise error.Abort(_('certificate chain building is only possible on '
2552 'Windows'))
2552 'Windows'))
2553
2553
2554 if not source:
2554 if not source:
2555 if not repo:
2555 if not repo:
2556 raise error.Abort(_("there is no Mercurial repository here, and no "
2556 raise error.Abort(_("there is no Mercurial repository here, and no "
2557 "server specified"))
2557 "server specified"))
2558 source = "default"
2558 source = "default"
2559
2559
2560 source, branches = hg.parseurl(ui.expandpath(source))
2560 source, branches = hg.parseurl(ui.expandpath(source))
2561 url = util.url(source)
2561 url = util.url(source)
2562 addr = None
2562 addr = None
2563
2563
2564 defaultport = {'https': 443, 'ssh': 22}
2564 defaultport = {'https': 443, 'ssh': 22}
2565 if url.scheme in defaultport:
2565 if url.scheme in defaultport:
2566 try:
2566 try:
2567 addr = (url.host, int(url.port or defaultport[url.scheme]))
2567 addr = (url.host, int(url.port or defaultport[url.scheme]))
2568 except ValueError:
2568 except ValueError:
2569 raise error.Abort(_("malformed port number in URL"))
2569 raise error.Abort(_("malformed port number in URL"))
2570 else:
2570 else:
2571 raise error.Abort(_("only https and ssh connections are supported"))
2571 raise error.Abort(_("only https and ssh connections are supported"))
2572
2572
2573 from . import win32
2573 from . import win32
2574
2574
2575 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2575 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2576 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2576 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2577
2577
2578 try:
2578 try:
2579 s.connect(addr)
2579 s.connect(addr)
2580 cert = s.getpeercert(True)
2580 cert = s.getpeercert(True)
2581
2581
2582 ui.status(_('checking the certificate chain for %s\n') % url.host)
2582 ui.status(_('checking the certificate chain for %s\n') % url.host)
2583
2583
2584 complete = win32.checkcertificatechain(cert, build=False)
2584 complete = win32.checkcertificatechain(cert, build=False)
2585
2585
2586 if not complete:
2586 if not complete:
2587 ui.status(_('certificate chain is incomplete, updating... '))
2587 ui.status(_('certificate chain is incomplete, updating... '))
2588
2588
2589 if not win32.checkcertificatechain(cert):
2589 if not win32.checkcertificatechain(cert):
2590 ui.status(_('failed.\n'))
2590 ui.status(_('failed.\n'))
2591 else:
2591 else:
2592 ui.status(_('done.\n'))
2592 ui.status(_('done.\n'))
2593 else:
2593 else:
2594 ui.status(_('full certificate chain is available\n'))
2594 ui.status(_('full certificate chain is available\n'))
2595 finally:
2595 finally:
2596 s.close()
2596 s.close()
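# Illustrative invocations (hostname is a placeholder); only meaningful on
# Windows, where missing intermediates can be pulled via Windows Update::
#
#   $ hg debugssl                              # test the 'default' path
#   $ hg debugssl https://hg.example.com/repo  # test an arbitrary HTTPS server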
2597
2597
2598 @command('debugsub',
2598 @command('debugsub',
2599 [('r', 'rev', '',
2599 [('r', 'rev', '',
2600 _('revision to check'), _('REV'))],
2600 _('revision to check'), _('REV'))],
2601 _('[-r REV] [REV]'))
2601 _('[-r REV] [REV]'))
2602 def debugsub(ui, repo, rev=None):
2602 def debugsub(ui, repo, rev=None):
2603 ctx = scmutil.revsingle(repo, rev, None)
2603 ctx = scmutil.revsingle(repo, rev, None)
2604 for k, v in sorted(ctx.substate.items()):
2604 for k, v in sorted(ctx.substate.items()):
2605 ui.write(('path %s\n') % k)
2605 ui.write(('path %s\n') % k)
2606 ui.write((' source %s\n') % v[0])
2606 ui.write((' source %s\n') % v[0])
2607 ui.write((' revision %s\n') % v[1])
2607 ui.write((' revision %s\n') % v[1])
2608
2608
2609 @command('debugsuccessorssets',
2609 @command('debugsuccessorssets',
2610 [('', 'closest', False, _('return closest successors sets only'))],
2610 [('', 'closest', False, _('return closest successors sets only'))],
2611 _('[REV]'))
2611 _('[REV]'))
2612 def debugsuccessorssets(ui, repo, *revs, **opts):
2612 def debugsuccessorssets(ui, repo, *revs, **opts):
2613 """show set of successors for revision
2613 """show set of successors for revision
2614
2614
2615 A successors set of changeset A is a consistent group of revisions that
2615 A successors set of changeset A is a consistent group of revisions that
2616 succeed A. It contains non-obsolete changesets only unless the closest
2616 succeed A. It contains non-obsolete changesets only unless the closest
2617 successors sets are requested (``--closest``).
2617 successors sets are requested (``--closest``).
2618
2618
2619 In most cases a changeset A has a single successors set containing a single
2619 In most cases a changeset A has a single successors set containing a single
2620 successor (changeset A replaced by A').
2620 successor (changeset A replaced by A').
2621
2621
2622 A changeset that is made obsolete with no successors is called "pruned".
2622 A changeset that is made obsolete with no successors is called "pruned".
2623 Such changesets have no successors sets at all.
2623 Such changesets have no successors sets at all.
2624
2624
2625 A changeset that has been "split" will have a successors set containing
2625 A changeset that has been "split" will have a successors set containing
2626 more than one successor.
2626 more than one successor.
2627
2627
2628 A changeset that has been rewritten in multiple different ways is called
2628 A changeset that has been rewritten in multiple different ways is called
2629 "divergent". Such changesets have multiple successor sets (each of which
2629 "divergent". Such changesets have multiple successor sets (each of which
2630 may also be split, i.e. have multiple successors).
2630 may also be split, i.e. have multiple successors).
2631
2631
2632 Results are displayed as follows::
2632 Results are displayed as follows::
2633
2633
2634 <rev1>
2634 <rev1>
2635 <successors-1A>
2635 <successors-1A>
2636 <rev2>
2636 <rev2>
2637 <successors-2A>
2637 <successors-2A>
2638 <successors-2B1> <successors-2B2> <successors-2B3>
2638 <successors-2B1> <successors-2B2> <successors-2B3>
2639
2639
2640 Here rev2 has two possible (i.e. divergent) successors sets. The first
2640 Here rev2 has two possible (i.e. divergent) successors sets. The first
2641 holds one element, whereas the second holds three (i.e. the changeset has
2641 holds one element, whereas the second holds three (i.e. the changeset has
2642 been split).
2642 been split).
2643 """
2643 """
2644 # passed to successorssets caching computation from one call to another
2644 # passed to successorssets caching computation from one call to another
2645 cache = {}
2645 cache = {}
2646 ctx2str = bytes
2646 ctx2str = bytes
2647 node2str = short
2647 node2str = short
2648 for rev in scmutil.revrange(repo, revs):
2648 for rev in scmutil.revrange(repo, revs):
2649 ctx = repo[rev]
2649 ctx = repo[rev]
2650 ui.write('%s\n' % ctx2str(ctx))
2650 ui.write('%s\n' % ctx2str(ctx))
2651 for succsset in obsutil.successorssets(repo, ctx.node(),
2651 for succsset in obsutil.successorssets(repo, ctx.node(),
2652 closest=opts[r'closest'],
2652 closest=opts[r'closest'],
2653 cache=cache):
2653 cache=cache):
2654 if succsset:
2654 if succsset:
2655 ui.write(' ')
2655 ui.write(' ')
2656 ui.write(node2str(succsset[0]))
2656 ui.write(node2str(succsset[0]))
2657 for node in succsset[1:]:
2657 for node in succsset[1:]:
2658 ui.write(' ')
2658 ui.write(' ')
2659 ui.write(node2str(node))
2659 ui.write(node2str(node))
2660 ui.write('\n')
2660 ui.write('\n')
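# Minimal API sketch (assumes an existing `repo` object; for illustration
# only): the same data printed above can be obtained directly from obsutil::
#
#   cache = {}                          # shared across calls, as done above
#   node = repo['.'].node()
#   for succset in obsutil.successorssets(repo, node, cache=cache):
#       print([short(n) for n in succset])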
2661
2661
2662 @command('debugtemplate',
2662 @command('debugtemplate',
2663 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2663 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2664 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2664 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2665 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2665 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2666 optionalrepo=True)
2666 optionalrepo=True)
2667 def debugtemplate(ui, repo, tmpl, **opts):
2667 def debugtemplate(ui, repo, tmpl, **opts):
2668 """parse and apply a template
2668 """parse and apply a template
2669
2669
2670 If -r/--rev is given, the template is processed as a log template and
2670 If -r/--rev is given, the template is processed as a log template and
2671 applied to the given changesets. Otherwise, it is processed as a generic
2671 applied to the given changesets. Otherwise, it is processed as a generic
2672 template.
2672 template.
2673
2673
2674 Use --verbose to print the parsed tree.
2674 Use --verbose to print the parsed tree.
2675 """
2675 """
2676 revs = None
2676 revs = None
2677 if opts[r'rev']:
2677 if opts[r'rev']:
2678 if repo is None:
2678 if repo is None:
2679 raise error.RepoError(_('there is no Mercurial repository here '
2679 raise error.RepoError(_('there is no Mercurial repository here '
2680 '(.hg not found)'))
2680 '(.hg not found)'))
2681 revs = scmutil.revrange(repo, opts[r'rev'])
2681 revs = scmutil.revrange(repo, opts[r'rev'])
2682
2682
2683 props = {}
2683 props = {}
2684 for d in opts[r'define']:
2684 for d in opts[r'define']:
2685 try:
2685 try:
2686 k, v = (e.strip() for e in d.split('=', 1))
2686 k, v = (e.strip() for e in d.split('=', 1))
2687 if not k or k == 'ui':
2687 if not k or k == 'ui':
2688 raise ValueError
2688 raise ValueError
2689 props[k] = v
2689 props[k] = v
2690 except ValueError:
2690 except ValueError:
2691 raise error.Abort(_('malformed keyword definition: %s') % d)
2691 raise error.Abort(_('malformed keyword definition: %s') % d)
2692
2692
2693 if ui.verbose:
2693 if ui.verbose:
2694 aliases = ui.configitems('templatealias')
2694 aliases = ui.configitems('templatealias')
2695 tree = templater.parse(tmpl)
2695 tree = templater.parse(tmpl)
2696 ui.note(templater.prettyformat(tree), '\n')
2696 ui.note(templater.prettyformat(tree), '\n')
2697 newtree = templater.expandaliases(tree, aliases)
2697 newtree = templater.expandaliases(tree, aliases)
2698 if newtree != tree:
2698 if newtree != tree:
2699 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2699 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2700
2700
2701 if revs is None:
2701 if revs is None:
2702 tres = formatter.templateresources(ui, repo)
2702 tres = formatter.templateresources(ui, repo)
2703 t = formatter.maketemplater(ui, tmpl, resources=tres)
2703 t = formatter.maketemplater(ui, tmpl, resources=tres)
2704 if ui.verbose:
2704 if ui.verbose:
2705 kwds, funcs = t.symbolsuseddefault()
2705 kwds, funcs = t.symbolsuseddefault()
2706 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2706 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2707 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2707 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2708 ui.write(t.renderdefault(props))
2708 ui.write(t.renderdefault(props))
2709 else:
2709 else:
2710 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2710 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2711 if ui.verbose:
2711 if ui.verbose:
2712 kwds, funcs = displayer.t.symbolsuseddefault()
2712 kwds, funcs = displayer.t.symbolsuseddefault()
2713 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2713 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2714 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2714 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2715 for r in revs:
2715 for r in revs:
2716 displayer.show(repo[r], **pycompat.strkwargs(props))
2716 displayer.show(repo[r], **pycompat.strkwargs(props))
2717 displayer.close()
2717 displayer.close()
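# Illustrative invocations: a bare template renders generically, -r switches
# to log-template mode, and -D injects extra keywords (parsed above)::
#
#   $ hg debugtemplate 'hello\n'
#   $ hg debugtemplate -r . '{node|short}\n'
#   $ hg debugtemplate -r . -D greeting=hi '{greeting} {node|short}\n'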
2718
2718
2719 @command('debuguigetpass', [
2719 @command('debuguigetpass', [
2720 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2720 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2721 ], _('[-p TEXT]'), norepo=True)
2721 ], _('[-p TEXT]'), norepo=True)
2722 def debuguigetpass(ui, prompt=''):
2722 def debuguigetpass(ui, prompt=''):
2723 """show prompt to type password"""
2723 """show prompt to type password"""
2724 r = ui.getpass(prompt)
2724 r = ui.getpass(prompt)
2725 ui.write(('response: %s\n') % r)
2725 ui.write(('response: %s\n') % r)
2726
2726
2727 @command('debuguiprompt', [
2727 @command('debuguiprompt', [
2728 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2728 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2729 ], _('[-p TEXT]'), norepo=True)
2729 ], _('[-p TEXT]'), norepo=True)
2730 def debuguiprompt(ui, prompt=''):
2730 def debuguiprompt(ui, prompt=''):
2731 """show plain prompt"""
2731 """show plain prompt"""
2732 r = ui.prompt(prompt)
2732 r = ui.prompt(prompt)
2733 ui.write(('response: %s\n') % r)
2733 ui.write(('response: %s\n') % r)
2734
2734
2735 @command('debugupdatecaches', [])
2735 @command('debugupdatecaches', [])
2736 def debugupdatecaches(ui, repo, *pats, **opts):
2736 def debugupdatecaches(ui, repo, *pats, **opts):
2737 """warm all known caches in the repository"""
2737 """warm all known caches in the repository"""
2738 with repo.wlock(), repo.lock():
2738 with repo.wlock(), repo.lock():
2739 repo.updatecaches(full=True)
2739 repo.updatecaches(full=True)
2740
2740
2741 @command('debugupgraderepo', [
2741 @command('debugupgraderepo', [
2742 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2742 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2743 ('', 'run', False, _('performs an upgrade')),
2743 ('', 'run', False, _('performs an upgrade')),
2744 ])
2744 ])
2745 def debugupgraderepo(ui, repo, run=False, optimize=None):
2745 def debugupgraderepo(ui, repo, run=False, optimize=None):
2746 """upgrade a repository to use different features
2746 """upgrade a repository to use different features
2747
2747
2748 If no arguments are specified, the repository is evaluated for upgrade
2748 If no arguments are specified, the repository is evaluated for upgrade
2749 and a list of problems and potential optimizations is printed.
2749 and a list of problems and potential optimizations is printed.
2750
2750
2751 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2751 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2752 can be influenced via additional arguments. More details will be provided
2752 can be influenced via additional arguments. More details will be provided
2753 by the command output when run without ``--run``.
2753 by the command output when run without ``--run``.
2754
2754
2755 During the upgrade, the repository will be locked and no writes will be
2755 During the upgrade, the repository will be locked and no writes will be
2756 allowed.
2756 allowed.
2757
2757
2758 At the end of the upgrade, the repository may not be readable while new
2758 At the end of the upgrade, the repository may not be readable while new
2759 repository data is swapped in. This window will be as long as it takes to
2759 repository data is swapped in. This window will be as long as it takes to
2760 rename some directories inside the ``.hg`` directory. On most machines, this
2760 rename some directories inside the ``.hg`` directory. On most machines, this
2761 should complete almost instantaneously and the chances of a consumer being
2761 should complete almost instantaneously and the chances of a consumer being
2762 unable to access the repository should be low.
2762 unable to access the repository should be low.
2763 """
2763 """
2764 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2764 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
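# Illustrative workflow: run once without --run to see what would change,
# then repeat with --run to actually perform the upgrade under repo locks::
#
#   $ hg debugupgraderepo
#   $ hg debugupgraderepo --run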
2765
2765
2766 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2766 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2767 inferrepo=True)
2767 inferrepo=True)
2768 def debugwalk(ui, repo, *pats, **opts):
2768 def debugwalk(ui, repo, *pats, **opts):
2769 """show how files match on given patterns"""
2769 """show how files match on given patterns"""
2770 opts = pycompat.byteskwargs(opts)
2770 opts = pycompat.byteskwargs(opts)
2771 m = scmutil.match(repo[None], pats, opts)
2771 m = scmutil.match(repo[None], pats, opts)
2772 if ui.verbose:
2772 if ui.verbose:
2773 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
2773 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
2774 items = list(repo[None].walk(m))
2774 items = list(repo[None].walk(m))
2775 if not items:
2775 if not items:
2776 return
2776 return
2777 f = lambda fn: fn
2777 f = lambda fn: fn
2778 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2778 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2779 f = lambda fn: util.normpath(fn)
2779 f = lambda fn: util.normpath(fn)
2780 fmt = 'f %%-%ds %%-%ds %%s' % (
2780 fmt = 'f %%-%ds %%-%ds %%s' % (
2781 max([len(abs) for abs in items]),
2781 max([len(abs) for abs in items]),
2782 max([len(m.rel(abs)) for abs in items]))
2782 max([len(m.rel(abs)) for abs in items]))
2783 for abs in items:
2783 for abs in items:
2784 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2784 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2785 ui.write("%s\n" % line.rstrip())
2785 ui.write("%s\n" % line.rstrip())
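# Illustrative invocation: patterns use the usual `hg help patterns` syntax,
# and --verbose additionally dumps the constructed matcher::
#
#   $ hg debugwalk -v 'glob:**.py'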
2786
2786
2787 @command('debugwhyunstable', [], _('REV'))
2787 @command('debugwhyunstable', [], _('REV'))
2788 def debugwhyunstable(ui, repo, rev):
2788 def debugwhyunstable(ui, repo, rev):
2789 """explain instabilities of a changeset"""
2789 """explain instabilities of a changeset"""
2790 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
2790 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
2791 dnodes = ''
2791 dnodes = ''
2792 if entry.get('divergentnodes'):
2792 if entry.get('divergentnodes'):
2793 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2793 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2794 for ctx in entry['divergentnodes']) + ' '
2794 for ctx in entry['divergentnodes']) + ' '
2795 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2795 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2796 entry['reason'], entry['node']))
2796 entry['reason'], entry['node']))
2797
2797
2798 @command('debugwireargs',
2798 @command('debugwireargs',
2799 [('', 'three', '', 'three'),
2799 [('', 'three', '', 'three'),
2800 ('', 'four', '', 'four'),
2800 ('', 'four', '', 'four'),
2801 ('', 'five', '', 'five'),
2801 ('', 'five', '', 'five'),
2802 ] + cmdutil.remoteopts,
2802 ] + cmdutil.remoteopts,
2803 _('REPO [OPTIONS]... [ONE [TWO]]'),
2803 _('REPO [OPTIONS]... [ONE [TWO]]'),
2804 norepo=True)
2804 norepo=True)
2805 def debugwireargs(ui, repopath, *vals, **opts):
2805 def debugwireargs(ui, repopath, *vals, **opts):
2806 opts = pycompat.byteskwargs(opts)
2806 opts = pycompat.byteskwargs(opts)
2807 repo = hg.peer(ui, opts, repopath)
2807 repo = hg.peer(ui, opts, repopath)
2808 for opt in cmdutil.remoteopts:
2808 for opt in cmdutil.remoteopts:
2809 del opts[opt[1]]
2809 del opts[opt[1]]
2810 args = {}
2810 args = {}
2811 for k, v in opts.iteritems():
2811 for k, v in opts.iteritems():
2812 if v:
2812 if v:
2813 args[k] = v
2813 args[k] = v
2814 args = pycompat.strkwargs(args)
2814 args = pycompat.strkwargs(args)
2815 # run twice to check that we don't mess up the stream for the next command
2815 # run twice to check that we don't mess up the stream for the next command
2816 res1 = repo.debugwireargs(*vals, **args)
2816 res1 = repo.debugwireargs(*vals, **args)
2817 res2 = repo.debugwireargs(*vals, **args)
2817 res2 = repo.debugwireargs(*vals, **args)
2818 ui.write("%s\n" % res1)
2818 ui.write("%s\n" % res1)
2819 if res1 != res2:
2819 if res1 != res2:
2820 ui.warn("%s\n" % res2)
2820 ui.warn("%s\n" % res2)
2821
2821
2822 def _parsewirelangblocks(fh):
2822 def _parsewirelangblocks(fh):
2823 activeaction = None
2823 activeaction = None
2824 blocklines = []
2824 blocklines = []
2825
2825
2826 for line in fh:
2826 for line in fh:
2827 line = line.rstrip()
2827 line = line.rstrip()
2828 if not line:
2828 if not line:
2829 continue
2829 continue
2830
2830
2831 if line.startswith(b'#'):
2831 if line.startswith(b'#'):
2832 continue
2832 continue
2833
2833
2834 if not line.startswith(b' '):
2834 if not line.startswith(b' '):
2835 # New block. Flush previous one.
2835 # New block. Flush previous one.
2836 if activeaction:
2836 if activeaction:
2837 yield activeaction, blocklines
2837 yield activeaction, blocklines
2838
2838
2839 activeaction = line
2839 activeaction = line
2840 blocklines = []
2840 blocklines = []
2841 continue
2841 continue
2842
2842
2843 # Else we start with an indent.
2843 # Else we start with an indent.
2844
2844
2845 if not activeaction:
2845 if not activeaction:
2846 raise error.Abort(_('indented line outside of block'))
2846 raise error.Abort(_('indented line outside of block'))
2847
2847
2848 blocklines.append(line)
2848 blocklines.append(line)
2849
2849
2850 # Flush last block.
2850 # Flush last block.
2851 if activeaction:
2851 if activeaction:
2852 yield activeaction, blocklines
2852 yield activeaction, blocklines
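# Parsing sketch (illustrative only): feeding the generator a small script
# shows the (action, blocklines) pairs it yields::
#
#   import io
#   script = b'command listkeys\n    namespace bookmarks\n'
#   list(_parsewirelangblocks(io.BytesIO(script)))
#   # -> [(b'command listkeys', [b'    namespace bookmarks'])]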
2853
2853
2854 @command('debugwireproto',
2854 @command('debugwireproto',
2855 [
2855 [
2856 ('', 'localssh', False, _('start an SSH server for this repo')),
2856 ('', 'localssh', False, _('start an SSH server for this repo')),
2857 ('', 'peer', '', _('construct a specific version of the peer')),
2857 ('', 'peer', '', _('construct a specific version of the peer')),
2858 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2858 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2859 ('', 'nologhandshake', False,
2859 ('', 'nologhandshake', False,
2860 _('do not log I/O related to the peer handshake')),
2860 _('do not log I/O related to the peer handshake')),
2861 ] + cmdutil.remoteopts,
2861 ] + cmdutil.remoteopts,
2862 _('[PATH]'),
2862 _('[PATH]'),
2863 optionalrepo=True)
2863 optionalrepo=True)
2864 def debugwireproto(ui, repo, path=None, **opts):
2864 def debugwireproto(ui, repo, path=None, **opts):
2865 """send wire protocol commands to a server
2865 """send wire protocol commands to a server
2866
2866
2867 This command can be used to issue wire protocol commands to remote
2867 This command can be used to issue wire protocol commands to remote
2868 peers and to debug the raw data being exchanged.
2868 peers and to debug the raw data being exchanged.
2869
2869
2870 ``--localssh`` will start an SSH server against the current repository
2870 ``--localssh`` will start an SSH server against the current repository
2871 and connect to that. By default, the connection will perform a handshake
2871 and connect to that. By default, the connection will perform a handshake
2872 and establish an appropriate peer instance.
2872 and establish an appropriate peer instance.
2873
2873
2874 ``--peer`` can be used to bypass the handshake protocol and construct a
2874 ``--peer`` can be used to bypass the handshake protocol and construct a
2875 peer instance using the specified class type. Valid values are ``raw``,
2875 peer instance using the specified class type. Valid values are ``raw``,
2876 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2876 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2877 raw data payloads and don't support higher-level command actions.
2877 raw data payloads and don't support higher-level command actions.
2878
2878
2879 ``--noreadstderr`` can be used to disable automatic reading from stderr
2879 ``--noreadstderr`` can be used to disable automatic reading from stderr
2880 of the peer (for SSH connections only). Disabling automatic reading of
2880 of the peer (for SSH connections only). Disabling automatic reading of
2881 stderr is useful for making output more deterministic.
2881 stderr is useful for making output more deterministic.
2882
2882
2883 Commands are issued via a mini language which is specified via stdin.
2883 Commands are issued via a mini language which is specified via stdin.
2884 The language consists of individual actions to perform. An action is
2884 The language consists of individual actions to perform. An action is
2885 defined by a block. A block is defined as a line with no leading
2885 defined by a block. A block is defined as a line with no leading
2886 space followed by 0 or more lines with leading space. Blocks are
2886 space followed by 0 or more lines with leading space. Blocks are
2887 effectively a high-level command with additional metadata.
2887 effectively a high-level command with additional metadata.
2888
2888
2889 Lines beginning with ``#`` are ignored.
2889 Lines beginning with ``#`` are ignored.
2890
2890
2891 The following sections denote available actions.
2891 The following sections denote available actions.
2892
2892
2893 raw
2893 raw
2894 ---
2894 ---
2895
2895
2896 Send raw data to the server.
2896 Send raw data to the server.
2897
2897
2898 The block payload contains the raw data to send as one atomic send
2898 The block payload contains the raw data to send as one atomic send
2899 operation. The data may not actually be delivered in a single system
2899 operation. The data may not actually be delivered in a single system
2900 call: it depends on the abilities of the transport being used.
2900 call: it depends on the abilities of the transport being used.
2901
2901
2902 Each line in the block is de-indented and concatenated. Then, that
2902 Each line in the block is de-indented and concatenated. Then, that
2903 value is evaluated as a Python b'' literal. This allows the use of
2903 value is evaluated as a Python b'' literal. This allows the use of
2904 backslash escaping, etc.
2904 backslash escaping, etc.
2905
2905
2906 raw+
2906 raw+
2907 ----
2907 ----
2908
2908
2909 Behaves like ``raw`` except flushes output afterwards.
2909 Behaves like ``raw`` except flushes output afterwards.
2910
2910
2911 command <X>
2911 command <X>
2912 -----------
2912 -----------
2913
2913
2914 Send a request to run a named command, whose name follows the ``command``
2914 Send a request to run a named command, whose name follows the ``command``
2915 string.
2915 string.
2916
2916
2917 Arguments to the command are defined as lines in this block. The format of
2917 Arguments to the command are defined as lines in this block. The format of
2918 each line is ``<key> <value>``. e.g.::
2918 each line is ``<key> <value>``. e.g.::
2919
2919
2920 command listkeys
2920 command listkeys
2921 namespace bookmarks
2921 namespace bookmarks
2922
2922
2923 If the value begins with ``eval:``, it will be interpreted as a Python
2923 If the value begins with ``eval:``, it will be interpreted as a Python
2924 literal expression. Otherwise values are interpreted as Python b'' literals.
2924 literal expression. Otherwise values are interpreted as Python b'' literals.
2925 This allows sending complex types and encoding special byte sequences via
2925 This allows sending complex types and encoding special byte sequences via
2926 backslash escaping.
2926 backslash escaping.
2927
2927
2928 The following arguments have special meaning:
2928 The following arguments have special meaning:
2929
2929
2930 ``PUSHFILE``
2930 ``PUSHFILE``
2931 When defined, the *push* mechanism of the peer will be used instead
2931 When defined, the *push* mechanism of the peer will be used instead
2932 of the static request-response mechanism and the content of the
2932 of the static request-response mechanism and the content of the
2933 file specified in the value of this argument will be sent as the
2933 file specified in the value of this argument will be sent as the
2934 command payload.
2934 command payload.
2935
2935
2936 This can be used to submit a local bundle file to the remote.
2936 This can be used to submit a local bundle file to the remote.
2937
2937
2938 batchbegin
2938 batchbegin
2939 ----------
2939 ----------
2940
2940
2941 Instruct the peer to begin a batched send.
2941 Instruct the peer to begin a batched send.
2942
2942
2943 All ``command`` blocks are queued for execution until the next
2943 All ``command`` blocks are queued for execution until the next
2944 ``batchsubmit`` block.
2944 ``batchsubmit`` block.
2945
2945
2946 batchsubmit
2946 batchsubmit
2947 -----------
2947 -----------
2948
2948
2949 Submit previously queued ``command`` blocks as a batch request.
2949 Submit previously queued ``command`` blocks as a batch request.
2950
2950
2951 This action MUST be paired with a ``batchbegin`` action.
2951 This action MUST be paired with a ``batchbegin`` action.
2952
2952
2953 httprequest <method> <path>
2953 httprequest <method> <path>
2954 ---------------------------
2954 ---------------------------
2955
2955
2956 (HTTP peer only)
2956 (HTTP peer only)
2957
2957
2958 Send an HTTP request to the peer.
2958 Send an HTTP request to the peer.
2959
2959
2960 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
2960 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
2961
2961
2962 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
2962 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
2963 headers to add to the request. e.g. ``Accept: foo``.
2963 headers to add to the request. e.g. ``Accept: foo``.
2964
2964
2965 The following arguments are special:
2965 The following arguments are special:
2966
2966
2967 ``BODYFILE``
2967 ``BODYFILE``
2968 The content of the file defined as the value to this argument will be
2968 The content of the file defined as the value to this argument will be
2969 transferred verbatim as the HTTP request body.
2969 transferred verbatim as the HTTP request body.
2970
2970
2971 ``frame <type> <flags> <payload>``
2971 ``frame <type> <flags> <payload>``
2972 Send a unified protocol frame as part of the request body.
2972 Send a unified protocol frame as part of the request body.
2973
2973
2974 All frames will be collected and sent as the body to the HTTP
2974 All frames will be collected and sent as the body to the HTTP
2975 request.
2975 request.
2976
2976
2977 close
2977 close
2978 -----
2978 -----
2979
2979
2980 Close the connection to the server.
2980 Close the connection to the server.
2981
2981
2982 flush
2982 flush
2983 -----
2983 -----
2984
2984
2985 Flush data written to the server.
2985 Flush data written to the server.
2986
2986
2987 readavailable
2987 readavailable
2988 -------------
2988 -------------
2989
2989
2990 Close the write end of the connection and read all available data from
2990 Close the write end of the connection and read all available data from
2991 the server.
2991 the server.
2992
2992
2993 If the connection to the server encompasses multiple pipes, we poll both
2993 If the connection to the server encompasses multiple pipes, we poll both
2994 pipes and read available data.
2994 pipes and read available data.
2995
2995
2996 readline
2996 readline
2997 --------
2997 --------
2998
2998
2999 Read a line of output from the server. If there are multiple output
2999 Read a line of output from the server. If there are multiple output
3000 pipes, reads only the main pipe.
3000 pipes, reads only the main pipe.
3001
3001
3002 ereadline
3002 ereadline
3003 ---------
3003 ---------
3004
3004
3005 Like ``readline``, but read from the stderr pipe, if available.
3005 Like ``readline``, but read from the stderr pipe, if available.
3006
3006
3007 read <X>
3007 read <X>
3008 --------
3008 --------
3009
3009
3010 ``read()`` N bytes from the server's main output pipe.
3010 ``read()`` N bytes from the server's main output pipe.
3011
3011
3012 eread <X>
3012 eread <X>
3013 ---------
3013 ---------
3014
3014
3015 ``read()`` N bytes from the server's stderr pipe, if available.
3015 ``read()`` N bytes from the server's stderr pipe, if available.
3016
3016
3017 Specifying Unified Frame-Based Protocol Frames
3017 Specifying Unified Frame-Based Protocol Frames
3018 ----------------------------------------------
3018 ----------------------------------------------
3019
3019
3020 It is possible to emit *Unified Frame-Based Protocol* frames by using
3020 It is possible to emit *Unified Frame-Based Protocol* frames by using
3021 special syntax.
3021 special syntax.
3022
3022
3023 A frame is composed as a type, flags, and payload. These can be parsed
3023 A frame is composed as a type, flags, and payload. These can be parsed
3024 from a string of the form:
3024 from a string of the form:
3025
3025
3026 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
3026 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
3027
3027
3028 ``request-id`` and ``stream-id`` are integers defining the request and
3028 ``request-id`` and ``stream-id`` are integers defining the request and
3029 stream identifiers.
3029 stream identifiers.
3030
3030
3031 ``type`` can be an integer value for the frame type or the string name
3031 ``type`` can be an integer value for the frame type or the string name
3032 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
3032 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
3033 ``command-name``.
3033 ``command-name``.
3034
3034
3035 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
3035 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
3036 components. Each component (and there can be just one) can be an integer
3036 components. Each component (and there can be just one) can be an integer
3037 or a flag name for stream flags or frame flags, respectively. Values are
3037 or a flag name for stream flags or frame flags, respectively. Values are
3038 resolved to integers and then bitwise OR'd together.
3038 resolved to integers and then bitwise OR'd together.
3039
3039
3040 ``payload`` represents the raw frame payload. If it begins with
3040 ``payload`` represents the raw frame payload. If it begins with
3041 ``cbor:``, the following string is evaluated as Python code and the
3041 ``cbor:``, the following string is evaluated as Python code and the
3042 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3042 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3043 as a Python byte string literal.
3043 as a Python byte string literal.
3044 """
3044 """
3045 opts = pycompat.byteskwargs(opts)
3045 opts = pycompat.byteskwargs(opts)
3046
3046
3047 if opts['localssh'] and not repo:
3047 if opts['localssh'] and not repo:
3048 raise error.Abort(_('--localssh requires a repository'))
3048 raise error.Abort(_('--localssh requires a repository'))
3049
3049
3050 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
3050 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
3051 raise error.Abort(_('invalid value for --peer'),
3051 raise error.Abort(_('invalid value for --peer'),
3052 hint=_('valid values are "raw", "http2", "ssh1", and "ssh2"'))
3052 hint=_('valid values are "raw", "http2", "ssh1", and "ssh2"'))
3053
3053
3054 if path and opts['localssh']:
3054 if path and opts['localssh']:
3055 raise error.Abort(_('cannot specify --localssh with an explicit '
3055 raise error.Abort(_('cannot specify --localssh with an explicit '
3056 'path'))
3056 'path'))
3057
3057
3058 if ui.interactive():
3058 if ui.interactive():
3059 ui.write(_('(waiting for commands on stdin)\n'))
3059 ui.write(_('(waiting for commands on stdin)\n'))
3060
3060
3061 blocks = list(_parsewirelangblocks(ui.fin))
3061 blocks = list(_parsewirelangblocks(ui.fin))
3062
3062
3063 proc = None
3063 proc = None
3064 stdin = None
3064 stdin = None
3065 stdout = None
3065 stdout = None
3066 stderr = None
3066 stderr = None
3067 opener = None
3067 opener = None
3068
3068
3069 if opts['localssh']:
3069 if opts['localssh']:
3070 # We start the SSH server in its own process so there is process
3070 # We start the SSH server in its own process so there is process
3071 # separation. This prevents a whole class of potential bugs around
3071 # separation. This prevents a whole class of potential bugs around
3072 # shared state from interfering with server operation.
3072 # shared state from interfering with server operation.
3073 args = procutil.hgcmd() + [
3073 args = procutil.hgcmd() + [
3074 '-R', repo.root,
3074 '-R', repo.root,
3075 'debugserve', '--sshstdio',
3075 'debugserve', '--sshstdio',
3076 ]
3076 ]
3077 proc = subprocess.Popen(pycompat.rapply(procutil.tonativestr, args),
3077 proc = subprocess.Popen(pycompat.rapply(procutil.tonativestr, args),
3078 stdin=subprocess.PIPE,
3078 stdin=subprocess.PIPE,
3079 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
3079 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
3080 bufsize=0)
3080 bufsize=0)
3081
3081
3082 stdin = proc.stdin
3082 stdin = proc.stdin
3083 stdout = proc.stdout
3083 stdout = proc.stdout
3084 stderr = proc.stderr
3084 stderr = proc.stderr
3085
3085
3086 # We turn the pipes into observers so we can log I/O.
3086 # We turn the pipes into observers so we can log I/O.
3087 if ui.verbose or opts['peer'] == 'raw':
3087 if ui.verbose or opts['peer'] == 'raw':
3088 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
3088 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
3089 logdata=True)
3089 logdata=True)
3090 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
3090 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
3091 logdata=True)
3091 logdata=True)
3092 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
3092 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
3093 logdata=True)
3093 logdata=True)
3094
3094
3095 # --localssh also implies the peer connection settings.
3095 # --localssh also implies the peer connection settings.
3096
3096
3097 url = 'ssh://localserver'
3097 url = 'ssh://localserver'
3098 autoreadstderr = not opts['noreadstderr']
3098 autoreadstderr = not opts['noreadstderr']
3099
3099
3100 if opts['peer'] == 'ssh1':
3100 if opts['peer'] == 'ssh1':
3101 ui.write(_('creating ssh peer for wire protocol version 1\n'))
3101 ui.write(_('creating ssh peer for wire protocol version 1\n'))
3102 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
3102 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
3103 None, autoreadstderr=autoreadstderr)
3103 None, autoreadstderr=autoreadstderr)
3104 elif opts['peer'] == 'ssh2':
3104 elif opts['peer'] == 'ssh2':
3105 ui.write(_('creating ssh peer for wire protocol version 2\n'))
3105 ui.write(_('creating ssh peer for wire protocol version 2\n'))
3106 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
3106 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
3107 None, autoreadstderr=autoreadstderr)
3107 None, autoreadstderr=autoreadstderr)
3108 elif opts['peer'] == 'raw':
3108 elif opts['peer'] == 'raw':
3109 ui.write(_('using raw connection to peer\n'))
3109 ui.write(_('using raw connection to peer\n'))
3110 peer = None
3110 peer = None
3111 else:
3111 else:
3112 ui.write(_('creating ssh peer from handshake results\n'))
3112 ui.write(_('creating ssh peer from handshake results\n'))
3113 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
3113 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
3114 autoreadstderr=autoreadstderr)
3114 autoreadstderr=autoreadstderr)
3115
3115
3116 elif path:
3116 elif path:
3117 # We bypass hg.peer() so we can proxy the sockets.
3117 # We bypass hg.peer() so we can proxy the sockets.
3118 # TODO consider not doing this because we skip
3118 # TODO consider not doing this because we skip
3119 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
3119 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
3120 u = util.url(path)
3120 u = util.url(path)
3121 if u.scheme != 'http':
3121 if u.scheme != 'http':
3122 raise error.Abort(_('only http:// paths are currently supported'))
3122 raise error.Abort(_('only http:// paths are currently supported'))
3123
3123
3124 url, authinfo = u.authinfo()
3124 url, authinfo = u.authinfo()
3125 openerargs = {
3125 openerargs = {
3126 r'useragent': b'Mercurial debugwireproto',
3126 r'useragent': b'Mercurial debugwireproto',
3127 }
3127 }
3128
3128
3129 # Turn pipes/sockets into observers so we can log I/O.
3129 # Turn pipes/sockets into observers so we can log I/O.
3130 if ui.verbose:
3130 if ui.verbose:
3131 openerargs.update({
3131 openerargs.update({
3132 r'loggingfh': ui,
3132 r'loggingfh': ui,
3133 r'loggingname': b's',
3133 r'loggingname': b's',
3134 r'loggingopts': {
3134 r'loggingopts': {
3135 r'logdata': True,
3135 r'logdata': True,
3136 r'logdataapis': False,
3136 r'logdataapis': False,
3137 },
3137 },
3138 })
3138 })
3139
3139
3140 if ui.debugflag:
3140 if ui.debugflag:
3141 openerargs[r'loggingopts'][r'logdataapis'] = True
3141 openerargs[r'loggingopts'][r'logdataapis'] = True
3142
3142
3143 # Don't send default headers when in raw mode. This allows us to
3143 # Don't send default headers when in raw mode. This allows us to
3144 # bypass most of the behavior of our URL handling code so we can
3144 # bypass most of the behavior of our URL handling code so we can
3145 # have near complete control over what's sent on the wire.
3145 # have near complete control over what's sent on the wire.
3146 if opts['peer'] == 'raw':
3146 if opts['peer'] == 'raw':
3147 openerargs[r'sendaccept'] = False
3147 openerargs[r'sendaccept'] = False
3148
3148
3149 opener = urlmod.opener(ui, authinfo, **openerargs)
3149 opener = urlmod.opener(ui, authinfo, **openerargs)
3150
3150
3151 if opts['peer'] == 'http2':
3151 if opts['peer'] == 'http2':
3152 ui.write(_('creating http peer for wire protocol version 2\n'))
3152 ui.write(_('creating http peer for wire protocol version 2\n'))
3153 # We go through makepeer() because we need an API descriptor for
3153 # We go through makepeer() because we need an API descriptor for
3154 # the peer instance to be useful.
3154 # the peer instance to be useful.
3155 with ui.configoverride({
3155 with ui.configoverride({
3156 ('experimental', 'httppeer.advertise-v2'): True}):
3156 ('experimental', 'httppeer.advertise-v2'): True}):
3157 if opts['nologhandshake']:
3157 if opts['nologhandshake']:
3158 ui.pushbuffer()
3158 ui.pushbuffer()
3159
3159
3160 peer = httppeer.makepeer(ui, path, opener=opener)
3160 peer = httppeer.makepeer(ui, path, opener=opener)
3161
3161
3162 if opts['nologhandshake']:
3162 if opts['nologhandshake']:
3163 ui.popbuffer()
3163 ui.popbuffer()
3164
3164
3165 if not isinstance(peer, httppeer.httpv2peer):
3165 if not isinstance(peer, httppeer.httpv2peer):
3166 raise error.Abort(_('could not instantiate HTTP peer for '
3166 raise error.Abort(_('could not instantiate HTTP peer for '
3167 'wire protocol version 2'),
3167 'wire protocol version 2'),
3168 hint=_('the server may not have the feature '
3168 hint=_('the server may not have the feature '
3169 'enabled or is not allowing this '
3169 'enabled or is not allowing this '
3170 'client version'))
3170 'client version'))
3171
3171
3172 elif opts['peer'] == 'raw':
3172 elif opts['peer'] == 'raw':
3173 ui.write(_('using raw connection to peer\n'))
3173 ui.write(_('using raw connection to peer\n'))
3174 peer = None
3174 peer = None
3175 elif opts['peer']:
3175 elif opts['peer']:
3176 raise error.Abort(_('--peer %s not supported with HTTP peers') %
3176 raise error.Abort(_('--peer %s not supported with HTTP peers') %
3177 opts['peer'])
3177 opts['peer'])
3178 else:
3178 else:
3179 peer = httppeer.makepeer(ui, path, opener=opener)
3179 peer = httppeer.makepeer(ui, path, opener=opener)
3180
3180
3181 # We /could/ populate stdin/stdout with sock.makefile()...
3181 # We /could/ populate stdin/stdout with sock.makefile()...
3182 else:
3182 else:
3183 raise error.Abort(_('unsupported connection configuration'))
3183 raise error.Abort(_('unsupported connection configuration'))
3184
3184
3185 batchedcommands = None
3185 batchedcommands = None
3186
3186
3187 # Now perform actions based on the parsed wire language instructions.
3187 # Now perform actions based on the parsed wire language instructions.
3188 for action, lines in blocks:
3188 for action, lines in blocks:
3189 if action in ('raw', 'raw+'):
3189 if action in ('raw', 'raw+'):
3190 if not stdin:
3190 if not stdin:
3191 raise error.Abort(_('cannot call raw/raw+ on this peer'))
3191 raise error.Abort(_('cannot call raw/raw+ on this peer'))
3192
3192
3193 # Concatenate the data together.
3193 # Concatenate the data together.
3194 data = ''.join(l.lstrip() for l in lines)
3194 data = ''.join(l.lstrip() for l in lines)
3195 data = stringutil.unescapestr(data)
3195 data = stringutil.unescapestr(data)
3196 stdin.write(data)
3196 stdin.write(data)
3197
3197
3198 if action == 'raw+':
3198 if action == 'raw+':
3199 stdin.flush()
3199 stdin.flush()
3200 elif action == 'flush':
3200 elif action == 'flush':
3201 if not stdin:
3201 if not stdin:
3202 raise error.Abort(_('cannot call flush on this peer'))
3202 raise error.Abort(_('cannot call flush on this peer'))
3203 stdin.flush()
3203 stdin.flush()
3204 elif action.startswith('command'):
3204 elif action.startswith('command'):
3205 if not peer:
3205 if not peer:
3206 raise error.Abort(_('cannot send commands unless peer instance '
3206 raise error.Abort(_('cannot send commands unless peer instance '
3207 'is available'))
3207 'is available'))
3208
3208
3209 command = action.split(' ', 1)[1]
3209 command = action.split(' ', 1)[1]
3210
3210
3211 args = {}
3211 args = {}
3212 for line in lines:
3212 for line in lines:
3213 # We need to allow empty values.
3213 # We need to allow empty values.
3214 fields = line.lstrip().split(' ', 1)
3214 fields = line.lstrip().split(' ', 1)
3215 if len(fields) == 1:
3215 if len(fields) == 1:
3216 key = fields[0]
3216 key = fields[0]
3217 value = ''
3217 value = ''
3218 else:
3218 else:
3219 key, value = fields
3219 key, value = fields
3220
3220
3221 if value.startswith('eval:'):
3221 if value.startswith('eval:'):
3222 value = stringutil.evalpythonliteral(value[5:])
3222 value = stringutil.evalpythonliteral(value[5:])
3223 else:
3223 else:
3224 value = stringutil.unescapestr(value)
3224 value = stringutil.unescapestr(value)
3225
3225
3226 args[key] = value
3226 args[key] = value
3227
3227
3228 if batchedcommands is not None:
3228 if batchedcommands is not None:
3229 batchedcommands.append((command, args))
3229 batchedcommands.append((command, args))
3230 continue
3230 continue
3231
3231
3232 ui.status(_('sending %s command\n') % command)
3232 ui.status(_('sending %s command\n') % command)
3233
3233
3234 if 'PUSHFILE' in args:
3234 if 'PUSHFILE' in args:
3235 with open(args['PUSHFILE'], r'rb') as fh:
3235 with open(args['PUSHFILE'], r'rb') as fh:
3236 del args['PUSHFILE']
3236 del args['PUSHFILE']
3237 res, output = peer._callpush(command, fh,
3237 res, output = peer._callpush(command, fh,
3238 **pycompat.strkwargs(args))
3238 **pycompat.strkwargs(args))
3239 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3239 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3240 ui.status(_('remote output: %s\n') %
3240 ui.status(_('remote output: %s\n') %
3241 stringutil.escapestr(output))
3241 stringutil.escapestr(output))
3242 else:
3242 else:
3243 with peer.commandexecutor() as e:
3243 with peer.commandexecutor() as e:
3244 res = e.callcommand(command, args).result()
3244 res = e.callcommand(command, args).result()
3245
3245
3246 if isinstance(res, wireprotov2peer.commandresponse):
3246 if isinstance(res, wireprotov2peer.commandresponse):
3247 val = res.objects()
3247 val = res.objects()
3248 ui.status(_('response: %s\n') %
3248 ui.status(_('response: %s\n') %
3249 stringutil.pprint(val, bprefix=True, indent=2))
3249 stringutil.pprint(val, bprefix=True, indent=2))
3250 else:
3250 else:
3251 ui.status(_('response: %s\n') %
3251 ui.status(_('response: %s\n') %
3252 stringutil.pprint(res, bprefix=True, indent=2))
3252 stringutil.pprint(res, bprefix=True, indent=2))
3253
3253
3254 elif action == 'batchbegin':
3254 elif action == 'batchbegin':
3255 if batchedcommands is not None:
3255 if batchedcommands is not None:
3256 raise error.Abort(_('nested batchbegin not allowed'))
3256 raise error.Abort(_('nested batchbegin not allowed'))
3257
3257
3258 batchedcommands = []
3258 batchedcommands = []
3259 elif action == 'batchsubmit':
3259 elif action == 'batchsubmit':
3260 # There is a batching API we could go through. But it would be
3260 # There is a batching API we could go through. But it would be
3261 # difficult to normalize requests into function calls. It is easier
3261 # difficult to normalize requests into function calls. It is easier
3262 # to bypass this layer and normalize to commands + args.
3262 # to bypass this layer and normalize to commands + args.
3263 ui.status(_('sending batch with %d sub-commands\n') %
3263 ui.status(_('sending batch with %d sub-commands\n') %
3264 len(batchedcommands))
3264 len(batchedcommands))
3265 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3265 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3266 ui.status(_('response #%d: %s\n') %
3266 ui.status(_('response #%d: %s\n') %
3267 (i, stringutil.escapestr(chunk)))
3267 (i, stringutil.escapestr(chunk)))
3268
3268
3269 batchedcommands = None
3269 batchedcommands = None
3270
3270
3271 elif action.startswith('httprequest '):
3271 elif action.startswith('httprequest '):
3272 if not opener:
3272 if not opener:
3273 raise error.Abort(_('cannot use httprequest without an HTTP '
3273 raise error.Abort(_('cannot use httprequest without an HTTP '
3274 'peer'))
3274 'peer'))
3275
3275
3276 request = action.split(' ', 2)
3276 request = action.split(' ', 2)
3277 if len(request) != 3:
3277 if len(request) != 3:
3278 raise error.Abort(_('invalid httprequest: expected format is '
3278 raise error.Abort(_('invalid httprequest: expected format is '
3279 '"httprequest <method> <path>'))
3279 '"httprequest <method> <path>'))
3280
3280
3281 method, httppath = request[1:]
3281 method, httppath = request[1:]
3282 headers = {}
3282 headers = {}
3283 body = None
3283 body = None
3284 frames = []
3284 frames = []
3285 for line in lines:
3285 for line in lines:
3286 line = line.lstrip()
3286 line = line.lstrip()
3287 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3287 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3288 if m:
3288 if m:
3289 headers[m.group(1)] = m.group(2)
3289 headers[m.group(1)] = m.group(2)
3290 continue
3290 continue
3291
3291
3292 if line.startswith(b'BODYFILE '):
3292 if line.startswith(b'BODYFILE '):
3293 with open(line.split(b' ', 1)[1], 'rb') as fh:
3293 with open(line.split(b' ', 1)[1], 'rb') as fh:
3294 body = fh.read()
3294 body = fh.read()
3295 elif line.startswith(b'frame '):
3295 elif line.startswith(b'frame '):
3296 frame = wireprotoframing.makeframefromhumanstring(
3296 frame = wireprotoframing.makeframefromhumanstring(
3297 line[len(b'frame '):])
3297 line[len(b'frame '):])
3298
3298
3299 frames.append(frame)
3299 frames.append(frame)
3300 else:
3300 else:
3301 raise error.Abort(_('unknown argument to httprequest: %s') %
3301 raise error.Abort(_('unknown argument to httprequest: %s') %
3302 line)
3302 line)
3303
3303
3304 url = path + httppath
3304 url = path + httppath
3305
3305
3306 if frames:
3306 if frames:
3307 body = b''.join(bytes(f) for f in frames)
3307 body = b''.join(bytes(f) for f in frames)
3308
3308
3309 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3309 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3310
3310
3311 # urllib.Request insists on using has_data() as a proxy for
3311 # urllib.Request insists on using has_data() as a proxy for
3312 # determining the request method. Override that to use our
3312 # determining the request method. Override that to use our
3313 # explicitly requested method.
3313 # explicitly requested method.
3314 req.get_method = lambda: pycompat.sysstr(method)
3314 req.get_method = lambda: pycompat.sysstr(method)
3315
3315
3316 try:
3316 try:
3317 res = opener.open(req)
3317 res = opener.open(req)
3318 body = res.read()
3318 body = res.read()
3319 except util.urlerr.urlerror as e:
3319 except util.urlerr.urlerror as e:
3320 # read() method must be called, but only exists in Python 2
3320 # read() method must be called, but only exists in Python 2
3321 getattr(e, 'read', lambda: None)()
3321 getattr(e, 'read', lambda: None)()
3322 continue
3322 continue
3323
3323
3324 if res.headers.get('Content-Type') == 'application/mercurial-cbor':
3324 if res.headers.get('Content-Type') == 'application/mercurial-cbor':
3325 ui.write(_('cbor> %s\n') %
3325 ui.write(_('cbor> %s\n') %
3326 stringutil.pprint(cborutil.decodeall(body)[0],
3326 stringutil.pprint(cborutil.decodeall(body)[0],
3327 bprefix=True,
3327 bprefix=True,
3328 indent=2))
3328 indent=2))
3329
3329
3330 elif action == 'close':
3330 elif action == 'close':
3331 peer.close()
3331 peer.close()
3332 elif action == 'readavailable':
3332 elif action == 'readavailable':
3333 if not stdout or not stderr:
3333 if not stdout or not stderr:
3334 raise error.Abort(_('readavailable not available on this peer'))
3334 raise error.Abort(_('readavailable not available on this peer'))
3335
3335
3336 stdin.close()
3336 stdin.close()
3337 stdout.read()
3337 stdout.read()
3338 stderr.read()
3338 stderr.read()
3339
3339
3340 elif action == 'readline':
3340 elif action == 'readline':
3341 if not stdout:
3341 if not stdout:
3342 raise error.Abort(_('readline not available on this peer'))
3342 raise error.Abort(_('readline not available on this peer'))
3343 stdout.readline()
3343 stdout.readline()
3344 elif action == 'ereadline':
3344 elif action == 'ereadline':
3345 if not stderr:
3345 if not stderr:
3346 raise error.Abort(_('ereadline not available on this peer'))
3346 raise error.Abort(_('ereadline not available on this peer'))
3347 stderr.readline()
3347 stderr.readline()
3348 elif action.startswith('read '):
3348 elif action.startswith('read '):
3349 count = int(action.split(' ', 1)[1])
3349 count = int(action.split(' ', 1)[1])
3350 if not stdout:
3350 if not stdout:
3351 raise error.Abort(_('read not available on this peer'))
3351 raise error.Abort(_('read not available on this peer'))
3352 stdout.read(count)
3352 stdout.read(count)
3353 elif action.startswith('eread '):
3353 elif action.startswith('eread '):
3354 count = int(action.split(' ', 1)[1])
3354 count = int(action.split(' ', 1)[1])
3355 if not stderr:
3355 if not stderr:
3356 raise error.Abort(_('eread not available on this peer'))
3356 raise error.Abort(_('eread not available on this peer'))
3357 stderr.read(count)
3357 stderr.read(count)
3358 else:
3358 else:
3359 raise error.Abort(_('unknown action: %s') % action)
3359 raise error.Abort(_('unknown action: %s') % action)
3360
3360
3361 if batchedcommands is not None:
3361 if batchedcommands is not None:
3362 raise error.Abort(_('unclosed "batchbegin" request'))
3362 raise error.Abort(_('unclosed "batchbegin" request'))
3363
3363
3364 if peer:
3364 if peer:
3365 peer.close()
3365 peer.close()
3366
3366
3367 if proc:
3367 if proc:
3368 proc.kill()
3368 proc.kill()