debugdiscovery: small internal refactoring...
marmoute
r42198:d31d8c52 default
@@ -1,3430 +1,3436 @@
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import difflib
12 import difflib
13 import errno
13 import errno
14 import operator
14 import operator
15 import os
15 import os
16 import random
16 import random
17 import re
17 import re
18 import socket
18 import socket
19 import ssl
19 import ssl
20 import stat
20 import stat
21 import string
21 import string
22 import subprocess
22 import subprocess
23 import sys
23 import sys
24 import time
24 import time
25
25
26 from .i18n import _
26 from .i18n import _
27 from .node import (
27 from .node import (
28 bin,
28 bin,
29 hex,
29 hex,
30 nullhex,
30 nullhex,
31 nullid,
31 nullid,
32 nullrev,
32 nullrev,
33 short,
33 short,
34 )
34 )
35 from . import (
35 from . import (
36 bundle2,
36 bundle2,
37 changegroup,
37 changegroup,
38 cmdutil,
38 cmdutil,
39 color,
39 color,
40 context,
40 context,
41 copies,
41 copies,
42 dagparser,
42 dagparser,
43 encoding,
43 encoding,
44 error,
44 error,
45 exchange,
45 exchange,
46 extensions,
46 extensions,
47 filemerge,
47 filemerge,
48 filesetlang,
48 filesetlang,
49 formatter,
49 formatter,
50 hg,
50 hg,
51 httppeer,
51 httppeer,
52 localrepo,
52 localrepo,
53 lock as lockmod,
53 lock as lockmod,
54 logcmdutil,
54 logcmdutil,
55 merge as mergemod,
55 merge as mergemod,
56 obsolete,
56 obsolete,
57 obsutil,
57 obsutil,
58 phases,
58 phases,
59 policy,
59 policy,
60 pvec,
60 pvec,
61 pycompat,
61 pycompat,
62 registrar,
62 registrar,
63 repair,
63 repair,
64 revlog,
64 revlog,
65 revset,
65 revset,
66 revsetlang,
66 revsetlang,
67 scmutil,
67 scmutil,
68 setdiscovery,
68 setdiscovery,
69 simplemerge,
69 simplemerge,
70 sshpeer,
70 sshpeer,
71 sslutil,
71 sslutil,
72 streamclone,
72 streamclone,
73 templater,
73 templater,
74 treediscovery,
74 treediscovery,
75 upgrade,
75 upgrade,
76 url as urlmod,
76 url as urlmod,
77 util,
77 util,
78 vfs as vfsmod,
78 vfs as vfsmod,
79 wireprotoframing,
79 wireprotoframing,
80 wireprotoserver,
80 wireprotoserver,
81 wireprotov2peer,
81 wireprotov2peer,
82 )
82 )
83 from .utils import (
83 from .utils import (
84 cborutil,
84 cborutil,
85 dateutil,
85 dateutil,
86 procutil,
86 procutil,
87 stringutil,
87 stringutil,
88 )
88 )
89
89
90 from .revlogutils import (
90 from .revlogutils import (
91 deltas as deltautil
91 deltas as deltautil
92 )
92 )
93
93
94 release = lockmod.release
94 release = lockmod.release
95
95
96 command = registrar.command()
96 command = registrar.command()
97
97
98 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
98 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
99 def debugancestor(ui, repo, *args):
99 def debugancestor(ui, repo, *args):
100 """find the ancestor revision of two revisions in a given index"""
100 """find the ancestor revision of two revisions in a given index"""
101 if len(args) == 3:
101 if len(args) == 3:
102 index, rev1, rev2 = args
102 index, rev1, rev2 = args
103 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
103 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
104 lookup = r.lookup
104 lookup = r.lookup
105 elif len(args) == 2:
105 elif len(args) == 2:
106 if not repo:
106 if not repo:
107 raise error.Abort(_('there is no Mercurial repository here '
107 raise error.Abort(_('there is no Mercurial repository here '
108 '(.hg not found)'))
108 '(.hg not found)'))
109 rev1, rev2 = args
109 rev1, rev2 = args
110 r = repo.changelog
110 r = repo.changelog
111 lookup = repo.lookup
111 lookup = repo.lookup
112 else:
112 else:
113 raise error.Abort(_('either two or three arguments required'))
113 raise error.Abort(_('either two or three arguments required'))
114 a = r.ancestor(lookup(rev1), lookup(rev2))
114 a = r.ancestor(lookup(rev1), lookup(rev2))
115 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
115 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
116
116
117 @command('debugapplystreamclonebundle', [], 'FILE')
117 @command('debugapplystreamclonebundle', [], 'FILE')
118 def debugapplystreamclonebundle(ui, repo, fname):
118 def debugapplystreamclonebundle(ui, repo, fname):
119 """apply a stream clone bundle file"""
119 """apply a stream clone bundle file"""
120 f = hg.openpath(ui, fname)
120 f = hg.openpath(ui, fname)
121 gen = exchange.readbundle(ui, f, fname)
121 gen = exchange.readbundle(ui, f, fname)
122 gen.apply(repo)
122 gen.apply(repo)
123
123
124 @command('debugbuilddag',
124 @command('debugbuilddag',
125 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
125 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
126 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
126 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
127 ('n', 'new-file', None, _('add new file at each rev'))],
127 ('n', 'new-file', None, _('add new file at each rev'))],
128 _('[OPTION]... [TEXT]'))
128 _('[OPTION]... [TEXT]'))
129 def debugbuilddag(ui, repo, text=None,
129 def debugbuilddag(ui, repo, text=None,
130 mergeable_file=False,
130 mergeable_file=False,
131 overwritten_file=False,
131 overwritten_file=False,
132 new_file=False):
132 new_file=False):
133 """builds a repo with a given DAG from scratch in the current empty repo
133 """builds a repo with a given DAG from scratch in the current empty repo
134
134
135 The description of the DAG is read from stdin if not given on the
135 The description of the DAG is read from stdin if not given on the
136 command line.
136 command line.
137
137
138 Elements:
138 Elements:
139
139
140 - "+n" is a linear run of n nodes based on the current default parent
140 - "+n" is a linear run of n nodes based on the current default parent
141 - "." is a single node based on the current default parent
141 - "." is a single node based on the current default parent
142 - "$" resets the default parent to null (implied at the start);
142 - "$" resets the default parent to null (implied at the start);
143 otherwise the default parent is always the last node created
143 otherwise the default parent is always the last node created
144 - "<p" sets the default parent to the backref p
144 - "<p" sets the default parent to the backref p
145 - "*p" is a fork at parent p, which is a backref
145 - "*p" is a fork at parent p, which is a backref
146 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
146 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
147 - "/p2" is a merge of the preceding node and p2
147 - "/p2" is a merge of the preceding node and p2
148 - ":tag" defines a local tag for the preceding node
148 - ":tag" defines a local tag for the preceding node
149 - "@branch" sets the named branch for subsequent nodes
149 - "@branch" sets the named branch for subsequent nodes
150 - "#...\\n" is a comment up to the end of the line
150 - "#...\\n" is a comment up to the end of the line
151
151
152 Whitespace between the above elements is ignored.
152 Whitespace between the above elements is ignored.
153
153
154 A backref is either
154 A backref is either
155
155
156 - a number n, which references the node curr-n, where curr is the current
156 - a number n, which references the node curr-n, where curr is the current
157 node, or
157 node, or
158 - the name of a local tag you placed earlier using ":tag", or
158 - the name of a local tag you placed earlier using ":tag", or
159 - empty to denote the default parent.
159 - empty to denote the default parent.
160
160
161 All string-valued elements are either strictly alphanumeric, or must
161 All string-valued elements are either strictly alphanumeric, or must
162 be enclosed in double quotes ("..."), with "\\" as escape character.
162 be enclosed in double quotes ("..."), with "\\" as escape character.
163 """
163 """
164
164
165 if text is None:
165 if text is None:
166 ui.status(_("reading DAG from stdin\n"))
166 ui.status(_("reading DAG from stdin\n"))
167 text = ui.fin.read()
167 text = ui.fin.read()
168
168
169 cl = repo.changelog
169 cl = repo.changelog
170 if len(cl) > 0:
170 if len(cl) > 0:
171 raise error.Abort(_('repository is not empty'))
171 raise error.Abort(_('repository is not empty'))
172
172
173 # determine number of revs in DAG
173 # determine number of revs in DAG
174 total = 0
174 total = 0
175 for type, data in dagparser.parsedag(text):
175 for type, data in dagparser.parsedag(text):
176 if type == 'n':
176 if type == 'n':
177 total += 1
177 total += 1
178
178
179 if mergeable_file:
179 if mergeable_file:
180 linesperrev = 2
180 linesperrev = 2
181 # make a file with k lines per rev
181 # make a file with k lines per rev
182 initialmergedlines = ['%d' % i
182 initialmergedlines = ['%d' % i
183 for i in pycompat.xrange(0, total * linesperrev)]
183 for i in pycompat.xrange(0, total * linesperrev)]
184 initialmergedlines.append("")
184 initialmergedlines.append("")
185
185
186 tags = []
186 tags = []
187 progress = ui.makeprogress(_('building'), unit=_('revisions'),
187 progress = ui.makeprogress(_('building'), unit=_('revisions'),
188 total=total)
188 total=total)
189 with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
189 with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
190 at = -1
190 at = -1
191 atbranch = 'default'
191 atbranch = 'default'
192 nodeids = []
192 nodeids = []
193 id = 0
193 id = 0
194 progress.update(id)
194 progress.update(id)
195 for type, data in dagparser.parsedag(text):
195 for type, data in dagparser.parsedag(text):
196 if type == 'n':
196 if type == 'n':
197 ui.note(('node %s\n' % pycompat.bytestr(data)))
197 ui.note(('node %s\n' % pycompat.bytestr(data)))
198 id, ps = data
198 id, ps = data
199
199
200 files = []
200 files = []
201 filecontent = {}
201 filecontent = {}
202
202
203 p2 = None
203 p2 = None
204 if mergeable_file:
204 if mergeable_file:
205 fn = "mf"
205 fn = "mf"
206 p1 = repo[ps[0]]
206 p1 = repo[ps[0]]
207 if len(ps) > 1:
207 if len(ps) > 1:
208 p2 = repo[ps[1]]
208 p2 = repo[ps[1]]
209 pa = p1.ancestor(p2)
209 pa = p1.ancestor(p2)
210 base, local, other = [x[fn].data() for x in (pa, p1,
210 base, local, other = [x[fn].data() for x in (pa, p1,
211 p2)]
211 p2)]
212 m3 = simplemerge.Merge3Text(base, local, other)
212 m3 = simplemerge.Merge3Text(base, local, other)
213 ml = [l.strip() for l in m3.merge_lines()]
213 ml = [l.strip() for l in m3.merge_lines()]
214 ml.append("")
214 ml.append("")
215 elif at > 0:
215 elif at > 0:
216 ml = p1[fn].data().split("\n")
216 ml = p1[fn].data().split("\n")
217 else:
217 else:
218 ml = initialmergedlines
218 ml = initialmergedlines
219 ml[id * linesperrev] += " r%i" % id
219 ml[id * linesperrev] += " r%i" % id
220 mergedtext = "\n".join(ml)
220 mergedtext = "\n".join(ml)
221 files.append(fn)
221 files.append(fn)
222 filecontent[fn] = mergedtext
222 filecontent[fn] = mergedtext
223
223
224 if overwritten_file:
224 if overwritten_file:
225 fn = "of"
225 fn = "of"
226 files.append(fn)
226 files.append(fn)
227 filecontent[fn] = "r%i\n" % id
227 filecontent[fn] = "r%i\n" % id
228
228
229 if new_file:
229 if new_file:
230 fn = "nf%i" % id
230 fn = "nf%i" % id
231 files.append(fn)
231 files.append(fn)
232 filecontent[fn] = "r%i\n" % id
232 filecontent[fn] = "r%i\n" % id
233 if len(ps) > 1:
233 if len(ps) > 1:
234 if not p2:
234 if not p2:
235 p2 = repo[ps[1]]
235 p2 = repo[ps[1]]
236 for fn in p2:
236 for fn in p2:
237 if fn.startswith("nf"):
237 if fn.startswith("nf"):
238 files.append(fn)
238 files.append(fn)
239 filecontent[fn] = p2[fn].data()
239 filecontent[fn] = p2[fn].data()
240
240
241 def fctxfn(repo, cx, path):
241 def fctxfn(repo, cx, path):
242 if path in filecontent:
242 if path in filecontent:
243 return context.memfilectx(repo, cx, path,
243 return context.memfilectx(repo, cx, path,
244 filecontent[path])
244 filecontent[path])
245 return None
245 return None
246
246
247 if len(ps) == 0 or ps[0] < 0:
247 if len(ps) == 0 or ps[0] < 0:
248 pars = [None, None]
248 pars = [None, None]
249 elif len(ps) == 1:
249 elif len(ps) == 1:
250 pars = [nodeids[ps[0]], None]
250 pars = [nodeids[ps[0]], None]
251 else:
251 else:
252 pars = [nodeids[p] for p in ps]
252 pars = [nodeids[p] for p in ps]
253 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
253 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
254 date=(id, 0),
254 date=(id, 0),
255 user="debugbuilddag",
255 user="debugbuilddag",
256 extra={'branch': atbranch})
256 extra={'branch': atbranch})
257 nodeid = repo.commitctx(cx)
257 nodeid = repo.commitctx(cx)
258 nodeids.append(nodeid)
258 nodeids.append(nodeid)
259 at = id
259 at = id
260 elif type == 'l':
260 elif type == 'l':
261 id, name = data
261 id, name = data
262 ui.note(('tag %s\n' % name))
262 ui.note(('tag %s\n' % name))
263 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
263 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
264 elif type == 'a':
264 elif type == 'a':
265 ui.note(('branch %s\n' % data))
265 ui.note(('branch %s\n' % data))
266 atbranch = data
266 atbranch = data
267 progress.update(id)
267 progress.update(id)
268
268
269 if tags:
269 if tags:
270 repo.vfs.write("localtags", "".join(tags))
270 repo.vfs.write("localtags", "".join(tags))
271
271
272 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
272 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
273 indent_string = ' ' * indent
273 indent_string = ' ' * indent
274 if all:
274 if all:
275 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
275 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
276 % indent_string)
276 % indent_string)
277
277
278 def showchunks(named):
278 def showchunks(named):
279 ui.write("\n%s%s\n" % (indent_string, named))
279 ui.write("\n%s%s\n" % (indent_string, named))
280 for deltadata in gen.deltaiter():
280 for deltadata in gen.deltaiter():
281 node, p1, p2, cs, deltabase, delta, flags = deltadata
281 node, p1, p2, cs, deltabase, delta, flags = deltadata
282 ui.write("%s%s %s %s %s %s %d\n" %
282 ui.write("%s%s %s %s %s %s %d\n" %
283 (indent_string, hex(node), hex(p1), hex(p2),
283 (indent_string, hex(node), hex(p1), hex(p2),
284 hex(cs), hex(deltabase), len(delta)))
284 hex(cs), hex(deltabase), len(delta)))
285
285
286 chunkdata = gen.changelogheader()
286 chunkdata = gen.changelogheader()
287 showchunks("changelog")
287 showchunks("changelog")
288 chunkdata = gen.manifestheader()
288 chunkdata = gen.manifestheader()
289 showchunks("manifest")
289 showchunks("manifest")
290 for chunkdata in iter(gen.filelogheader, {}):
290 for chunkdata in iter(gen.filelogheader, {}):
291 fname = chunkdata['filename']
291 fname = chunkdata['filename']
292 showchunks(fname)
292 showchunks(fname)
293 else:
293 else:
294 if isinstance(gen, bundle2.unbundle20):
294 if isinstance(gen, bundle2.unbundle20):
295 raise error.Abort(_('use debugbundle2 for this file'))
295 raise error.Abort(_('use debugbundle2 for this file'))
296 chunkdata = gen.changelogheader()
296 chunkdata = gen.changelogheader()
297 for deltadata in gen.deltaiter():
297 for deltadata in gen.deltaiter():
298 node, p1, p2, cs, deltabase, delta, flags = deltadata
298 node, p1, p2, cs, deltabase, delta, flags = deltadata
299 ui.write("%s%s\n" % (indent_string, hex(node)))
299 ui.write("%s%s\n" % (indent_string, hex(node)))
300
300
301 def _debugobsmarkers(ui, part, indent=0, **opts):
301 def _debugobsmarkers(ui, part, indent=0, **opts):
302 """display version and markers contained in 'data'"""
302 """display version and markers contained in 'data'"""
303 opts = pycompat.byteskwargs(opts)
303 opts = pycompat.byteskwargs(opts)
304 data = part.read()
304 data = part.read()
305 indent_string = ' ' * indent
305 indent_string = ' ' * indent
306 try:
306 try:
307 version, markers = obsolete._readmarkers(data)
307 version, markers = obsolete._readmarkers(data)
308 except error.UnknownVersion as exc:
308 except error.UnknownVersion as exc:
309 msg = "%sunsupported version: %s (%d bytes)\n"
309 msg = "%sunsupported version: %s (%d bytes)\n"
310 msg %= indent_string, exc.version, len(data)
310 msg %= indent_string, exc.version, len(data)
311 ui.write(msg)
311 ui.write(msg)
312 else:
312 else:
313 msg = "%sversion: %d (%d bytes)\n"
313 msg = "%sversion: %d (%d bytes)\n"
314 msg %= indent_string, version, len(data)
314 msg %= indent_string, version, len(data)
315 ui.write(msg)
315 ui.write(msg)
316 fm = ui.formatter('debugobsolete', opts)
316 fm = ui.formatter('debugobsolete', opts)
317 for rawmarker in sorted(markers):
317 for rawmarker in sorted(markers):
318 m = obsutil.marker(None, rawmarker)
318 m = obsutil.marker(None, rawmarker)
319 fm.startitem()
319 fm.startitem()
320 fm.plain(indent_string)
320 fm.plain(indent_string)
321 cmdutil.showmarker(fm, m)
321 cmdutil.showmarker(fm, m)
322 fm.end()
322 fm.end()
323
323
324 def _debugphaseheads(ui, data, indent=0):
324 def _debugphaseheads(ui, data, indent=0):
325 """display version and markers contained in 'data'"""
325 """display version and markers contained in 'data'"""
326 indent_string = ' ' * indent
326 indent_string = ' ' * indent
327 headsbyphase = phases.binarydecode(data)
327 headsbyphase = phases.binarydecode(data)
328 for phase in phases.allphases:
328 for phase in phases.allphases:
329 for head in headsbyphase[phase]:
329 for head in headsbyphase[phase]:
330 ui.write(indent_string)
330 ui.write(indent_string)
331 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
331 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
332
332
333 def _quasirepr(thing):
333 def _quasirepr(thing):
334 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
334 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
335 return '{%s}' % (
335 return '{%s}' % (
336 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
336 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
337 return pycompat.bytestr(repr(thing))
337 return pycompat.bytestr(repr(thing))
338
338
339 def _debugbundle2(ui, gen, all=None, **opts):
339 def _debugbundle2(ui, gen, all=None, **opts):
340 """lists the contents of a bundle2"""
340 """lists the contents of a bundle2"""
341 if not isinstance(gen, bundle2.unbundle20):
341 if not isinstance(gen, bundle2.unbundle20):
342 raise error.Abort(_('not a bundle2 file'))
342 raise error.Abort(_('not a bundle2 file'))
343 ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
343 ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
344 parttypes = opts.get(r'part_type', [])
344 parttypes = opts.get(r'part_type', [])
345 for part in gen.iterparts():
345 for part in gen.iterparts():
346 if parttypes and part.type not in parttypes:
346 if parttypes and part.type not in parttypes:
347 continue
347 continue
348 msg = '%s -- %s (mandatory: %r)\n'
348 msg = '%s -- %s (mandatory: %r)\n'
349 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
349 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
350 if part.type == 'changegroup':
350 if part.type == 'changegroup':
351 version = part.params.get('version', '01')
351 version = part.params.get('version', '01')
352 cg = changegroup.getunbundler(version, part, 'UN')
352 cg = changegroup.getunbundler(version, part, 'UN')
353 if not ui.quiet:
353 if not ui.quiet:
354 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
354 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
355 if part.type == 'obsmarkers':
355 if part.type == 'obsmarkers':
356 if not ui.quiet:
356 if not ui.quiet:
357 _debugobsmarkers(ui, part, indent=4, **opts)
357 _debugobsmarkers(ui, part, indent=4, **opts)
358 if part.type == 'phase-heads':
358 if part.type == 'phase-heads':
359 if not ui.quiet:
359 if not ui.quiet:
360 _debugphaseheads(ui, part, indent=4)
360 _debugphaseheads(ui, part, indent=4)
361
361
362 @command('debugbundle',
362 @command('debugbundle',
363 [('a', 'all', None, _('show all details')),
363 [('a', 'all', None, _('show all details')),
364 ('', 'part-type', [], _('show only the named part type')),
364 ('', 'part-type', [], _('show only the named part type')),
365 ('', 'spec', None, _('print the bundlespec of the bundle'))],
365 ('', 'spec', None, _('print the bundlespec of the bundle'))],
366 _('FILE'),
366 _('FILE'),
367 norepo=True)
367 norepo=True)
368 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
368 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
369 """lists the contents of a bundle"""
369 """lists the contents of a bundle"""
370 with hg.openpath(ui, bundlepath) as f:
370 with hg.openpath(ui, bundlepath) as f:
371 if spec:
371 if spec:
372 spec = exchange.getbundlespec(ui, f)
372 spec = exchange.getbundlespec(ui, f)
373 ui.write('%s\n' % spec)
373 ui.write('%s\n' % spec)
374 return
374 return
375
375
376 gen = exchange.readbundle(ui, f, bundlepath)
376 gen = exchange.readbundle(ui, f, bundlepath)
377 if isinstance(gen, bundle2.unbundle20):
377 if isinstance(gen, bundle2.unbundle20):
378 return _debugbundle2(ui, gen, all=all, **opts)
378 return _debugbundle2(ui, gen, all=all, **opts)
379 _debugchangegroup(ui, gen, all=all, **opts)
379 _debugchangegroup(ui, gen, all=all, **opts)
380
380
381 @command('debugcapabilities',
381 @command('debugcapabilities',
382 [], _('PATH'),
382 [], _('PATH'),
383 norepo=True)
383 norepo=True)
384 def debugcapabilities(ui, path, **opts):
384 def debugcapabilities(ui, path, **opts):
385 """lists the capabilities of a remote peer"""
385 """lists the capabilities of a remote peer"""
386 opts = pycompat.byteskwargs(opts)
386 opts = pycompat.byteskwargs(opts)
387 peer = hg.peer(ui, opts, path)
387 peer = hg.peer(ui, opts, path)
388 caps = peer.capabilities()
388 caps = peer.capabilities()
389 ui.write(('Main capabilities:\n'))
389 ui.write(('Main capabilities:\n'))
390 for c in sorted(caps):
390 for c in sorted(caps):
391 ui.write((' %s\n') % c)
391 ui.write((' %s\n') % c)
392 b2caps = bundle2.bundle2caps(peer)
392 b2caps = bundle2.bundle2caps(peer)
393 if b2caps:
393 if b2caps:
394 ui.write(('Bundle2 capabilities:\n'))
394 ui.write(('Bundle2 capabilities:\n'))
395 for key, values in sorted(b2caps.iteritems()):
395 for key, values in sorted(b2caps.iteritems()):
396 ui.write((' %s\n') % key)
396 ui.write((' %s\n') % key)
397 for v in values:
397 for v in values:
398 ui.write((' %s\n') % v)
398 ui.write((' %s\n') % v)
399
399
400 @command('debugcheckstate', [], '')
400 @command('debugcheckstate', [], '')
401 def debugcheckstate(ui, repo):
401 def debugcheckstate(ui, repo):
402 """validate the correctness of the current dirstate"""
402 """validate the correctness of the current dirstate"""
403 parent1, parent2 = repo.dirstate.parents()
403 parent1, parent2 = repo.dirstate.parents()
404 m1 = repo[parent1].manifest()
404 m1 = repo[parent1].manifest()
405 m2 = repo[parent2].manifest()
405 m2 = repo[parent2].manifest()
406 errors = 0
406 errors = 0
407 for f in repo.dirstate:
407 for f in repo.dirstate:
408 state = repo.dirstate[f]
408 state = repo.dirstate[f]
409 if state in "nr" and f not in m1:
409 if state in "nr" and f not in m1:
410 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
410 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
411 errors += 1
411 errors += 1
412 if state in "a" and f in m1:
412 if state in "a" and f in m1:
413 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
413 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
414 errors += 1
414 errors += 1
415 if state in "m" and f not in m1 and f not in m2:
415 if state in "m" and f not in m1 and f not in m2:
416 ui.warn(_("%s in state %s, but not in either manifest\n") %
416 ui.warn(_("%s in state %s, but not in either manifest\n") %
417 (f, state))
417 (f, state))
418 errors += 1
418 errors += 1
419 for f in m1:
419 for f in m1:
420 state = repo.dirstate[f]
420 state = repo.dirstate[f]
421 if state not in "nrm":
421 if state not in "nrm":
422 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
422 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
423 errors += 1
423 errors += 1
424 if errors:
424 if errors:
425 error = _(".hg/dirstate inconsistent with current parent's manifest")
425 error = _(".hg/dirstate inconsistent with current parent's manifest")
426 raise error.Abort(error)
426 raise error.Abort(error)
427
427
428 @command('debugcolor',
428 @command('debugcolor',
429 [('', 'style', None, _('show all configured styles'))],
429 [('', 'style', None, _('show all configured styles'))],
430 'hg debugcolor')
430 'hg debugcolor')
431 def debugcolor(ui, repo, **opts):
431 def debugcolor(ui, repo, **opts):
432 """show available color, effects or style"""
432 """show available color, effects or style"""
433 ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
433 ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
434 if opts.get(r'style'):
434 if opts.get(r'style'):
435 return _debugdisplaystyle(ui)
435 return _debugdisplaystyle(ui)
436 else:
436 else:
437 return _debugdisplaycolor(ui)
437 return _debugdisplaycolor(ui)
438
438
439 def _debugdisplaycolor(ui):
439 def _debugdisplaycolor(ui):
440 ui = ui.copy()
440 ui = ui.copy()
441 ui._styles.clear()
441 ui._styles.clear()
442 for effect in color._activeeffects(ui).keys():
442 for effect in color._activeeffects(ui).keys():
443 ui._styles[effect] = effect
443 ui._styles[effect] = effect
444 if ui._terminfoparams:
444 if ui._terminfoparams:
445 for k, v in ui.configitems('color'):
445 for k, v in ui.configitems('color'):
446 if k.startswith('color.'):
446 if k.startswith('color.'):
447 ui._styles[k] = k[6:]
447 ui._styles[k] = k[6:]
448 elif k.startswith('terminfo.'):
448 elif k.startswith('terminfo.'):
449 ui._styles[k] = k[9:]
449 ui._styles[k] = k[9:]
450 ui.write(_('available colors:\n'))
450 ui.write(_('available colors:\n'))
451 # sort label with a '_' after the other to group '_background' entry.
451 # sort label with a '_' after the other to group '_background' entry.
452 items = sorted(ui._styles.items(),
452 items = sorted(ui._styles.items(),
453 key=lambda i: ('_' in i[0], i[0], i[1]))
453 key=lambda i: ('_' in i[0], i[0], i[1]))
454 for colorname, label in items:
454 for colorname, label in items:
455 ui.write(('%s\n') % colorname, label=label)
455 ui.write(('%s\n') % colorname, label=label)
456
456
457 def _debugdisplaystyle(ui):
457 def _debugdisplaystyle(ui):
458 ui.write(_('available style:\n'))
458 ui.write(_('available style:\n'))
459 if not ui._styles:
459 if not ui._styles:
460 return
460 return
461 width = max(len(s) for s in ui._styles)
461 width = max(len(s) for s in ui._styles)
462 for label, effects in sorted(ui._styles.items()):
462 for label, effects in sorted(ui._styles.items()):
463 ui.write('%s' % label, label=label)
463 ui.write('%s' % label, label=label)
464 if effects:
464 if effects:
465 # 50
465 # 50
466 ui.write(': ')
466 ui.write(': ')
467 ui.write(' ' * (max(0, width - len(label))))
467 ui.write(' ' * (max(0, width - len(label))))
468 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
468 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
469 ui.write('\n')
469 ui.write('\n')
470
470
471 @command('debugcreatestreamclonebundle', [], 'FILE')
471 @command('debugcreatestreamclonebundle', [], 'FILE')
472 def debugcreatestreamclonebundle(ui, repo, fname):
472 def debugcreatestreamclonebundle(ui, repo, fname):
473 """create a stream clone bundle file
473 """create a stream clone bundle file
474
474
475 Stream bundles are special bundles that are essentially archives of
475 Stream bundles are special bundles that are essentially archives of
476 revlog files. They are commonly used for cloning very quickly.
476 revlog files. They are commonly used for cloning very quickly.
477 """
477 """
478 # TODO we may want to turn this into an abort when this functionality
478 # TODO we may want to turn this into an abort when this functionality
479 # is moved into `hg bundle`.
479 # is moved into `hg bundle`.
480 if phases.hassecret(repo):
480 if phases.hassecret(repo):
481 ui.warn(_('(warning: stream clone bundle will contain secret '
481 ui.warn(_('(warning: stream clone bundle will contain secret '
482 'revisions)\n'))
482 'revisions)\n'))
483
483
484 requirements, gen = streamclone.generatebundlev1(repo)
484 requirements, gen = streamclone.generatebundlev1(repo)
485 changegroup.writechunks(ui, gen, fname)
485 changegroup.writechunks(ui, gen, fname)
486
486
487 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
487 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
488
488
489 @command('debugdag',
489 @command('debugdag',
490 [('t', 'tags', None, _('use tags as labels')),
490 [('t', 'tags', None, _('use tags as labels')),
491 ('b', 'branches', None, _('annotate with branch names')),
491 ('b', 'branches', None, _('annotate with branch names')),
492 ('', 'dots', None, _('use dots for runs')),
492 ('', 'dots', None, _('use dots for runs')),
493 ('s', 'spaces', None, _('separate elements by spaces'))],
493 ('s', 'spaces', None, _('separate elements by spaces'))],
494 _('[OPTION]... [FILE [REV]...]'),
494 _('[OPTION]... [FILE [REV]...]'),
495 optionalrepo=True)
495 optionalrepo=True)
496 def debugdag(ui, repo, file_=None, *revs, **opts):
496 def debugdag(ui, repo, file_=None, *revs, **opts):
497 """format the changelog or an index DAG as a concise textual description
497 """format the changelog or an index DAG as a concise textual description
498
498
499 If you pass a revlog index, the revlog's DAG is emitted. If you list
499 If you pass a revlog index, the revlog's DAG is emitted. If you list
500 revision numbers, they get labeled in the output as rN.
500 revision numbers, they get labeled in the output as rN.
501
501
502 Otherwise, the changelog DAG of the current repo is emitted.
502 Otherwise, the changelog DAG of the current repo is emitted.
503 """
503 """
504 spaces = opts.get(r'spaces')
504 spaces = opts.get(r'spaces')
505 dots = opts.get(r'dots')
505 dots = opts.get(r'dots')
506 if file_:
506 if file_:
507 rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False),
507 rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False),
508 file_)
508 file_)
509 revs = set((int(r) for r in revs))
509 revs = set((int(r) for r in revs))
510 def events():
510 def events():
511 for r in rlog:
511 for r in rlog:
512 yield 'n', (r, list(p for p in rlog.parentrevs(r)
512 yield 'n', (r, list(p for p in rlog.parentrevs(r)
513 if p != -1))
513 if p != -1))
514 if r in revs:
514 if r in revs:
515 yield 'l', (r, "r%i" % r)
515 yield 'l', (r, "r%i" % r)
516 elif repo:
516 elif repo:
517 cl = repo.changelog
517 cl = repo.changelog
518 tags = opts.get(r'tags')
518 tags = opts.get(r'tags')
519 branches = opts.get(r'branches')
519 branches = opts.get(r'branches')
520 if tags:
520 if tags:
521 labels = {}
521 labels = {}
522 for l, n in repo.tags().items():
522 for l, n in repo.tags().items():
523 labels.setdefault(cl.rev(n), []).append(l)
523 labels.setdefault(cl.rev(n), []).append(l)
524 def events():
524 def events():
525 b = "default"
525 b = "default"
526 for r in cl:
526 for r in cl:
527 if branches:
527 if branches:
528 newb = cl.read(cl.node(r))[5]['branch']
528 newb = cl.read(cl.node(r))[5]['branch']
529 if newb != b:
529 if newb != b:
530 yield 'a', newb
530 yield 'a', newb
531 b = newb
531 b = newb
532 yield 'n', (r, list(p for p in cl.parentrevs(r)
532 yield 'n', (r, list(p for p in cl.parentrevs(r)
533 if p != -1))
533 if p != -1))
534 if tags:
534 if tags:
535 ls = labels.get(r)
535 ls = labels.get(r)
536 if ls:
536 if ls:
537 for l in ls:
537 for l in ls:
538 yield 'l', (r, l)
538 yield 'l', (r, l)
539 else:
539 else:
540 raise error.Abort(_('need repo for changelog dag'))
540 raise error.Abort(_('need repo for changelog dag'))
541
541
542 for line in dagparser.dagtextlines(events(),
542 for line in dagparser.dagtextlines(events(),
543 addspaces=spaces,
543 addspaces=spaces,
544 wraplabels=True,
544 wraplabels=True,
545 wrapannotations=True,
545 wrapannotations=True,
546 wrapnonlinear=dots,
546 wrapnonlinear=dots,
547 usedots=dots,
547 usedots=dots,
548 maxlinewidth=70):
548 maxlinewidth=70):
549 ui.write(line)
549 ui.write(line)
550 ui.write("\n")
550 ui.write("\n")
551
551
552 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
552 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
553 def debugdata(ui, repo, file_, rev=None, **opts):
553 def debugdata(ui, repo, file_, rev=None, **opts):
554 """dump the contents of a data file revision"""
554 """dump the contents of a data file revision"""
555 opts = pycompat.byteskwargs(opts)
555 opts = pycompat.byteskwargs(opts)
556 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
556 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
557 if rev is not None:
557 if rev is not None:
558 raise error.CommandError('debugdata', _('invalid arguments'))
558 raise error.CommandError('debugdata', _('invalid arguments'))
559 file_, rev = None, file_
559 file_, rev = None, file_
560 elif rev is None:
560 elif rev is None:
561 raise error.CommandError('debugdata', _('invalid arguments'))
561 raise error.CommandError('debugdata', _('invalid arguments'))
562 r = cmdutil.openstorage(repo, 'debugdata', file_, opts)
562 r = cmdutil.openstorage(repo, 'debugdata', file_, opts)
563 try:
563 try:
564 ui.write(r.revision(r.lookup(rev), raw=True))
564 ui.write(r.revision(r.lookup(rev), raw=True))
565 except KeyError:
565 except KeyError:
566 raise error.Abort(_('invalid revision identifier %s') % rev)
566 raise error.Abort(_('invalid revision identifier %s') % rev)
567
567
568 @command('debugdate',
568 @command('debugdate',
569 [('e', 'extended', None, _('try extended date formats'))],
569 [('e', 'extended', None, _('try extended date formats'))],
570 _('[-e] DATE [RANGE]'),
570 _('[-e] DATE [RANGE]'),
571 norepo=True, optionalrepo=True)
571 norepo=True, optionalrepo=True)
572 def debugdate(ui, date, range=None, **opts):
572 def debugdate(ui, date, range=None, **opts):
573 """parse and display a date"""
573 """parse and display a date"""
574 if opts[r"extended"]:
574 if opts[r"extended"]:
575 d = dateutil.parsedate(date, util.extendeddateformats)
575 d = dateutil.parsedate(date, util.extendeddateformats)
576 else:
576 else:
577 d = dateutil.parsedate(date)
577 d = dateutil.parsedate(date)
578 ui.write(("internal: %d %d\n") % d)
578 ui.write(("internal: %d %d\n") % d)
579 ui.write(("standard: %s\n") % dateutil.datestr(d))
579 ui.write(("standard: %s\n") % dateutil.datestr(d))
580 if range:
580 if range:
581 m = dateutil.matchdate(range)
581 m = dateutil.matchdate(range)
582 ui.write(("match: %s\n") % m(d[0]))
582 ui.write(("match: %s\n") % m(d[0]))
583
583
584 @command('debugdeltachain',
584 @command('debugdeltachain',
585 cmdutil.debugrevlogopts + cmdutil.formatteropts,
585 cmdutil.debugrevlogopts + cmdutil.formatteropts,
586 _('-c|-m|FILE'),
586 _('-c|-m|FILE'),
587 optionalrepo=True)
587 optionalrepo=True)
588 def debugdeltachain(ui, repo, file_=None, **opts):
588 def debugdeltachain(ui, repo, file_=None, **opts):
589 """dump information about delta chains in a revlog
589 """dump information about delta chains in a revlog
590
590
591 Output can be templatized. Available template keywords are:
591 Output can be templatized. Available template keywords are:
592
592
593 :``rev``: revision number
593 :``rev``: revision number
594 :``chainid``: delta chain identifier (numbered by unique base)
594 :``chainid``: delta chain identifier (numbered by unique base)
595 :``chainlen``: delta chain length to this revision
595 :``chainlen``: delta chain length to this revision
596 :``prevrev``: previous revision in delta chain
596 :``prevrev``: previous revision in delta chain
597 :``deltatype``: role of delta / how it was computed
597 :``deltatype``: role of delta / how it was computed
598 :``compsize``: compressed size of revision
598 :``compsize``: compressed size of revision
599 :``uncompsize``: uncompressed size of revision
599 :``uncompsize``: uncompressed size of revision
600 :``chainsize``: total size of compressed revisions in chain
600 :``chainsize``: total size of compressed revisions in chain
601 :``chainratio``: total chain size divided by uncompressed revision size
601 :``chainratio``: total chain size divided by uncompressed revision size
602 (new delta chains typically start at ratio 2.00)
602 (new delta chains typically start at ratio 2.00)
603 :``lindist``: linear distance from base revision in delta chain to end
603 :``lindist``: linear distance from base revision in delta chain to end
604 of this revision
604 of this revision
605 :``extradist``: total size of revisions not part of this delta chain from
605 :``extradist``: total size of revisions not part of this delta chain from
606 base of delta chain to end of this revision; a measurement
606 base of delta chain to end of this revision; a measurement
607 of how much extra data we need to read/seek across to read
607 of how much extra data we need to read/seek across to read
608 the delta chain for this revision
608 the delta chain for this revision
609 :``extraratio``: extradist divided by chainsize; another representation of
609 :``extraratio``: extradist divided by chainsize; another representation of
610 how much unrelated data is needed to load this delta chain
610 how much unrelated data is needed to load this delta chain
611
611
612 If the repository is configured to use the sparse read, additional keywords
612 If the repository is configured to use the sparse read, additional keywords
613 are available:
613 are available:
614
614
615 :``readsize``: total size of data read from the disk for a revision
615 :``readsize``: total size of data read from the disk for a revision
616 (sum of the sizes of all the blocks)
616 (sum of the sizes of all the blocks)
617 :``largestblock``: size of the largest block of data read from the disk
617 :``largestblock``: size of the largest block of data read from the disk
618 :``readdensity``: density of useful bytes in the data read from the disk
618 :``readdensity``: density of useful bytes in the data read from the disk
619 :``srchunks``: in how many data hunks the whole revision would be read
619 :``srchunks``: in how many data hunks the whole revision would be read
620
620
621 The sparse read can be enabled with experimental.sparse-read = True
621 The sparse read can be enabled with experimental.sparse-read = True
622 """
622 """
623 opts = pycompat.byteskwargs(opts)
623 opts = pycompat.byteskwargs(opts)
624 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
624 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
625 index = r.index
625 index = r.index
626 start = r.start
626 start = r.start
627 length = r.length
627 length = r.length
628 generaldelta = r.version & revlog.FLAG_GENERALDELTA
628 generaldelta = r.version & revlog.FLAG_GENERALDELTA
629 withsparseread = getattr(r, '_withsparseread', False)
629 withsparseread = getattr(r, '_withsparseread', False)
630
630
631 def revinfo(rev):
631 def revinfo(rev):
632 e = index[rev]
632 e = index[rev]
633 compsize = e[1]
633 compsize = e[1]
634 uncompsize = e[2]
634 uncompsize = e[2]
635 chainsize = 0
635 chainsize = 0
636
636
637 if generaldelta:
637 if generaldelta:
638 if e[3] == e[5]:
638 if e[3] == e[5]:
639 deltatype = 'p1'
639 deltatype = 'p1'
640 elif e[3] == e[6]:
640 elif e[3] == e[6]:
641 deltatype = 'p2'
641 deltatype = 'p2'
642 elif e[3] == rev - 1:
642 elif e[3] == rev - 1:
643 deltatype = 'prev'
643 deltatype = 'prev'
644 elif e[3] == rev:
644 elif e[3] == rev:
645 deltatype = 'base'
645 deltatype = 'base'
646 else:
646 else:
647 deltatype = 'other'
647 deltatype = 'other'
648 else:
648 else:
649 if e[3] == rev:
649 if e[3] == rev:
650 deltatype = 'base'
650 deltatype = 'base'
651 else:
651 else:
652 deltatype = 'prev'
652 deltatype = 'prev'
653
653
654 chain = r._deltachain(rev)[0]
654 chain = r._deltachain(rev)[0]
655 for iterrev in chain:
655 for iterrev in chain:
656 e = index[iterrev]
656 e = index[iterrev]
657 chainsize += e[1]
657 chainsize += e[1]
658
658
659 return compsize, uncompsize, deltatype, chain, chainsize
659 return compsize, uncompsize, deltatype, chain, chainsize
660
660
661 fm = ui.formatter('debugdeltachain', opts)
661 fm = ui.formatter('debugdeltachain', opts)
662
662
663 fm.plain(' rev chain# chainlen prev delta '
663 fm.plain(' rev chain# chainlen prev delta '
664 'size rawsize chainsize ratio lindist extradist '
664 'size rawsize chainsize ratio lindist extradist '
665 'extraratio')
665 'extraratio')
666 if withsparseread:
666 if withsparseread:
667 fm.plain(' readsize largestblk rddensity srchunks')
667 fm.plain(' readsize largestblk rddensity srchunks')
668 fm.plain('\n')
668 fm.plain('\n')
669
669
670 chainbases = {}
670 chainbases = {}
671 for rev in r:
671 for rev in r:
672 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
672 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
673 chainbase = chain[0]
673 chainbase = chain[0]
674 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
674 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
675 basestart = start(chainbase)
675 basestart = start(chainbase)
676 revstart = start(rev)
676 revstart = start(rev)
677 lineardist = revstart + comp - basestart
677 lineardist = revstart + comp - basestart
678 extradist = lineardist - chainsize
678 extradist = lineardist - chainsize
679 try:
679 try:
680 prevrev = chain[-2]
680 prevrev = chain[-2]
681 except IndexError:
681 except IndexError:
682 prevrev = -1
682 prevrev = -1
683
683
684 if uncomp != 0:
684 if uncomp != 0:
685 chainratio = float(chainsize) / float(uncomp)
685 chainratio = float(chainsize) / float(uncomp)
686 else:
686 else:
687 chainratio = chainsize
687 chainratio = chainsize
688
688
689 if chainsize != 0:
689 if chainsize != 0:
690 extraratio = float(extradist) / float(chainsize)
690 extraratio = float(extradist) / float(chainsize)
691 else:
691 else:
692 extraratio = extradist
692 extraratio = extradist
693
693
694 fm.startitem()
694 fm.startitem()
695 fm.write('rev chainid chainlen prevrev deltatype compsize '
695 fm.write('rev chainid chainlen prevrev deltatype compsize '
696 'uncompsize chainsize chainratio lindist extradist '
696 'uncompsize chainsize chainratio lindist extradist '
697 'extraratio',
697 'extraratio',
698 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
698 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
699 rev, chainid, len(chain), prevrev, deltatype, comp,
699 rev, chainid, len(chain), prevrev, deltatype, comp,
700 uncomp, chainsize, chainratio, lineardist, extradist,
700 uncomp, chainsize, chainratio, lineardist, extradist,
701 extraratio,
701 extraratio,
702 rev=rev, chainid=chainid, chainlen=len(chain),
702 rev=rev, chainid=chainid, chainlen=len(chain),
703 prevrev=prevrev, deltatype=deltatype, compsize=comp,
703 prevrev=prevrev, deltatype=deltatype, compsize=comp,
704 uncompsize=uncomp, chainsize=chainsize,
704 uncompsize=uncomp, chainsize=chainsize,
705 chainratio=chainratio, lindist=lineardist,
705 chainratio=chainratio, lindist=lineardist,
706 extradist=extradist, extraratio=extraratio)
706 extradist=extradist, extraratio=extraratio)
707 if withsparseread:
707 if withsparseread:
708 readsize = 0
708 readsize = 0
709 largestblock = 0
709 largestblock = 0
710 srchunks = 0
710 srchunks = 0
711
711
712 for revschunk in deltautil.slicechunk(r, chain):
712 for revschunk in deltautil.slicechunk(r, chain):
713 srchunks += 1
713 srchunks += 1
714 blkend = start(revschunk[-1]) + length(revschunk[-1])
714 blkend = start(revschunk[-1]) + length(revschunk[-1])
715 blksize = blkend - start(revschunk[0])
715 blksize = blkend - start(revschunk[0])
716
716
717 readsize += blksize
717 readsize += blksize
718 if largestblock < blksize:
718 if largestblock < blksize:
719 largestblock = blksize
719 largestblock = blksize
720
720
721 if readsize:
721 if readsize:
722 readdensity = float(chainsize) / float(readsize)
722 readdensity = float(chainsize) / float(readsize)
723 else:
723 else:
724 readdensity = 1
724 readdensity = 1
725
725
726 fm.write('readsize largestblock readdensity srchunks',
726 fm.write('readsize largestblock readdensity srchunks',
727 ' %10d %10d %9.5f %8d',
727 ' %10d %10d %9.5f %8d',
728 readsize, largestblock, readdensity, srchunks,
728 readsize, largestblock, readdensity, srchunks,
729 readsize=readsize, largestblock=largestblock,
729 readsize=readsize, largestblock=largestblock,
730 readdensity=readdensity, srchunks=srchunks)
730 readdensity=readdensity, srchunks=srchunks)
731
731
732 fm.plain('\n')
732 fm.plain('\n')
733
733
734 fm.end()
734 fm.end()
735
735
736 @command('debugdirstate|debugstate',
736 @command('debugdirstate|debugstate',
737 [('', 'nodates', None, _('do not display the saved mtime (DEPRECATED)')),
737 [('', 'nodates', None, _('do not display the saved mtime (DEPRECATED)')),
738 ('', 'dates', True, _('display the saved mtime')),
738 ('', 'dates', True, _('display the saved mtime')),
739 ('', 'datesort', None, _('sort by saved mtime'))],
739 ('', 'datesort', None, _('sort by saved mtime'))],
740 _('[OPTION]...'))
740 _('[OPTION]...'))
741 def debugstate(ui, repo, **opts):
741 def debugstate(ui, repo, **opts):
742 """show the contents of the current dirstate"""
742 """show the contents of the current dirstate"""
743
743
744 nodates = not opts[r'dates']
744 nodates = not opts[r'dates']
745 if opts.get(r'nodates') is not None:
745 if opts.get(r'nodates') is not None:
746 nodates = True
746 nodates = True
747 datesort = opts.get(r'datesort')
747 datesort = opts.get(r'datesort')
748
748
749 if datesort:
749 if datesort:
750 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
750 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
751 else:
751 else:
752 keyfunc = None # sort by filename
752 keyfunc = None # sort by filename
753 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
753 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
754 if ent[3] == -1:
754 if ent[3] == -1:
755 timestr = 'unset '
755 timestr = 'unset '
756 elif nodates:
756 elif nodates:
757 timestr = 'set '
757 timestr = 'set '
758 else:
758 else:
759 timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
759 timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
760 time.localtime(ent[3]))
760 time.localtime(ent[3]))
761 timestr = encoding.strtolocal(timestr)
761 timestr = encoding.strtolocal(timestr)
762 if ent[1] & 0o20000:
762 if ent[1] & 0o20000:
763 mode = 'lnk'
763 mode = 'lnk'
764 else:
764 else:
765 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
765 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
766 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
766 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
767 for f in repo.dirstate.copies():
767 for f in repo.dirstate.copies():
768 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
768 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
769
769
770 @command('debugdiscovery',
770 @command('debugdiscovery',
771 [('', 'old', None, _('use old-style discovery')),
771 [('', 'old', None, _('use old-style discovery')),
772 ('', 'nonheads', None,
772 ('', 'nonheads', None,
773 _('use old-style discovery with non-heads included')),
773 _('use old-style discovery with non-heads included')),
774 ('', 'rev', [], 'restrict discovery to this set of revs'),
774 ('', 'rev', [], 'restrict discovery to this set of revs'),
775 ('', 'seed', '12323', 'specify the random seed used for discovery'),
775 ('', 'seed', '12323', 'specify the random seed used for discovery'),
776 ] + cmdutil.remoteopts,
776 ] + cmdutil.remoteopts,
777 _('[--rev REV] [OTHER]'))
777 _('[--rev REV] [OTHER]'))
778 def debugdiscovery(ui, repo, remoteurl="default", **opts):
778 def debugdiscovery(ui, repo, remoteurl="default", **opts):
779 """runs the changeset discovery protocol in isolation"""
779 """runs the changeset discovery protocol in isolation"""
780 opts = pycompat.byteskwargs(opts)
780 opts = pycompat.byteskwargs(opts)
781 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
781 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
782 remote = hg.peer(repo, opts, remoteurl)
782 remote = hg.peer(repo, opts, remoteurl)
783 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
783 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
784
784
785 # make sure tests are repeatable
785 # make sure tests are repeatable
786 random.seed(int(opts['seed']))
786 random.seed(int(opts['seed']))
787
787
-    def doit(pushedrevs, remoteheads, remote=remote):
-        if opts.get('old'):
+    if opts.get('old'):
+        def doit(pushedrevs, remoteheads, remote=remote):
             if not util.safehasattr(remote, 'branches'):
                 # enable in-client legacy support
                 remote = localrepo.locallegacypeer(remote.local())
             common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                 force=True)
             common = set(common)
             if not opts.get('nonheads'):
                 ui.write(("unpruned common: %s\n") %
                          " ".join(sorted(short(n) for n in common)))
 
                 clnode = repo.changelog.node
                 common = repo.revs('heads(::%ln)', common)
                 common = {clnode(r) for r in common}
-        else:
+            return common, hds
+    else:
+        def doit(pushedrevs, remoteheads, remote=remote):
             nodes = None
             if pushedrevs:
                 revs = scmutil.revrange(repo, pushedrevs)
                 nodes = [repo[r].node() for r in revs]
             common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
                                                             ancestorsof=nodes)
-        common = set(common)
-        rheads = set(hds)
-        lheads = set(repo.heads())
-        ui.write(("common heads: %s\n") %
-                 " ".join(sorted(short(n) for n in common)))
-        if lheads <= common:
-            ui.write(("local is subset\n"))
-        elif rheads <= common:
-            ui.write(("remote is subset\n"))
+            return common, hds
 
     remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
     localrevs = opts['rev']
-    doit(localrevs, remoterevs)
+    common, hds = doit(localrevs, remoterevs)
+
+    common = set(common)
+    rheads = set(hds)
+    lheads = set(repo.heads())
+    ui.write(("common heads: %s\n") %
+             " ".join(sorted(short(n) for n in common)))
+    if lheads <= common:
+        ui.write(("local is subset\n"))
+    elif rheads <= common:
+        ui.write(("remote is subset\n"))
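# Illustrative invocations (assumptions, not part of this change): run
# discovery against a remote in isolation, optionally restricting the local
# side to a set of revisions or forcing the legacy tree-discovery code path:
#
#   hg debugdiscovery --rev 'heads(default)' https://example.org/repo
#   hg debugdiscovery --old --seed 42 ../other-repo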
824 _chunksize = 4 << 10
830 _chunksize = 4 << 10
825
831
826 @command('debugdownload',
832 @command('debugdownload',
827 [
833 [
828 ('o', 'output', '', _('path')),
834 ('o', 'output', '', _('path')),
829 ],
835 ],
830 optionalrepo=True)
836 optionalrepo=True)
831 def debugdownload(ui, repo, url, output=None, **opts):
837 def debugdownload(ui, repo, url, output=None, **opts):
832 """download a resource using Mercurial logic and config
838 """download a resource using Mercurial logic and config
833 """
839 """
834 fh = urlmod.open(ui, url, output)
840 fh = urlmod.open(ui, url, output)
835
841
836 dest = ui
842 dest = ui
837 if output:
843 if output:
838 dest = open(output, "wb", _chunksize)
844 dest = open(output, "wb", _chunksize)
839 try:
845 try:
840 data = fh.read(_chunksize)
846 data = fh.read(_chunksize)
841 while data:
847 while data:
842 dest.write(data)
848 dest.write(data)
843 data = fh.read(_chunksize)
849 data = fh.read(_chunksize)
844 finally:
850 finally:
845 if output:
851 if output:
846 dest.close()
852 dest.close()
847
853
848 @command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
854 @command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
849 def debugextensions(ui, repo, **opts):
855 def debugextensions(ui, repo, **opts):
850 '''show information about active extensions'''
856 '''show information about active extensions'''
851 opts = pycompat.byteskwargs(opts)
857 opts = pycompat.byteskwargs(opts)
852 exts = extensions.extensions(ui)
858 exts = extensions.extensions(ui)
853 hgver = util.version()
859 hgver = util.version()
854 fm = ui.formatter('debugextensions', opts)
860 fm = ui.formatter('debugextensions', opts)
855 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
861 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
856 isinternal = extensions.ismoduleinternal(extmod)
862 isinternal = extensions.ismoduleinternal(extmod)
857 extsource = pycompat.fsencode(extmod.__file__)
863 extsource = pycompat.fsencode(extmod.__file__)
858 if isinternal:
864 if isinternal:
859 exttestedwith = [] # never expose magic string to users
865 exttestedwith = [] # never expose magic string to users
860 else:
866 else:
861 exttestedwith = getattr(extmod, 'testedwith', '').split()
867 exttestedwith = getattr(extmod, 'testedwith', '').split()
862 extbuglink = getattr(extmod, 'buglink', None)
868 extbuglink = getattr(extmod, 'buglink', None)
863
869
864 fm.startitem()
870 fm.startitem()
865
871
866 if ui.quiet or ui.verbose:
872 if ui.quiet or ui.verbose:
867 fm.write('name', '%s\n', extname)
873 fm.write('name', '%s\n', extname)
868 else:
874 else:
869 fm.write('name', '%s', extname)
875 fm.write('name', '%s', extname)
870 if isinternal or hgver in exttestedwith:
876 if isinternal or hgver in exttestedwith:
871 fm.plain('\n')
877 fm.plain('\n')
872 elif not exttestedwith:
878 elif not exttestedwith:
873 fm.plain(_(' (untested!)\n'))
879 fm.plain(_(' (untested!)\n'))
874 else:
880 else:
875 lasttestedversion = exttestedwith[-1]
881 lasttestedversion = exttestedwith[-1]
876 fm.plain(' (%s!)\n' % lasttestedversion)
882 fm.plain(' (%s!)\n' % lasttestedversion)
877
883
878 fm.condwrite(ui.verbose and extsource, 'source',
884 fm.condwrite(ui.verbose and extsource, 'source',
879 _(' location: %s\n'), extsource or "")
885 _(' location: %s\n'), extsource or "")
880
886
881 if ui.verbose:
887 if ui.verbose:
882 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
888 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
883 fm.data(bundled=isinternal)
889 fm.data(bundled=isinternal)
884
890
885 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
891 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
886 _(' tested with: %s\n'),
892 _(' tested with: %s\n'),
887 fm.formatlist(exttestedwith, name='ver'))
893 fm.formatlist(exttestedwith, name='ver'))
888
894
889 fm.condwrite(ui.verbose and extbuglink, 'buglink',
895 fm.condwrite(ui.verbose and extbuglink, 'buglink',
890 _(' bug reporting: %s\n'), extbuglink or "")
896 _(' bug reporting: %s\n'), extbuglink or "")
891
897
892 fm.end()
898 fm.end()
893
899
894 @command('debugfileset',
900 @command('debugfileset',
895 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
901 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
896 ('', 'all-files', False,
902 ('', 'all-files', False,
897 _('test files from all revisions and working directory')),
903 _('test files from all revisions and working directory')),
898 ('s', 'show-matcher', None,
904 ('s', 'show-matcher', None,
899 _('print internal representation of matcher')),
905 _('print internal representation of matcher')),
900 ('p', 'show-stage', [],
906 ('p', 'show-stage', [],
901 _('print parsed tree at the given stage'), _('NAME'))],
907 _('print parsed tree at the given stage'), _('NAME'))],
902 _('[-r REV] [--all-files] [OPTION]... FILESPEC'))
908 _('[-r REV] [--all-files] [OPTION]... FILESPEC'))
903 def debugfileset(ui, repo, expr, **opts):
909 def debugfileset(ui, repo, expr, **opts):
904 '''parse and apply a fileset specification'''
910 '''parse and apply a fileset specification'''
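# Illustrative invocation (the fileset expression is just an example):
#   hg debugfileset -r . -p all -s 'added()'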
905 from . import fileset
911 from . import fileset
906 fileset.symbols # force import of fileset so we have predicates to optimize
912 fileset.symbols # force import of fileset so we have predicates to optimize
907 opts = pycompat.byteskwargs(opts)
913 opts = pycompat.byteskwargs(opts)
908 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
914 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
909
915
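# Each stage callable transforms the tree produced by the previous one; the
# stage names below are the values accepted by --show-stage (plus 'all').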
910 stages = [
916 stages = [
911 ('parsed', pycompat.identity),
917 ('parsed', pycompat.identity),
912 ('analyzed', filesetlang.analyze),
918 ('analyzed', filesetlang.analyze),
913 ('optimized', filesetlang.optimize),
919 ('optimized', filesetlang.optimize),
914 ]
920 ]
915 stagenames = set(n for n, f in stages)
921 stagenames = set(n for n, f in stages)
916
922
917 showalways = set()
923 showalways = set()
918 if ui.verbose and not opts['show_stage']:
924 if ui.verbose and not opts['show_stage']:
919 # show parsed tree by --verbose (deprecated)
925 # show parsed tree by --verbose (deprecated)
920 showalways.add('parsed')
926 showalways.add('parsed')
921 if opts['show_stage'] == ['all']:
927 if opts['show_stage'] == ['all']:
922 showalways.update(stagenames)
928 showalways.update(stagenames)
923 else:
929 else:
924 for n in opts['show_stage']:
930 for n in opts['show_stage']:
925 if n not in stagenames:
931 if n not in stagenames:
926 raise error.Abort(_('invalid stage name: %s') % n)
932 raise error.Abort(_('invalid stage name: %s') % n)
927 showalways.update(opts['show_stage'])
933 showalways.update(opts['show_stage'])
928
934
929 tree = filesetlang.parse(expr)
935 tree = filesetlang.parse(expr)
930 for n, f in stages:
936 for n, f in stages:
931 tree = f(tree)
937 tree = f(tree)
932 if n in showalways:
938 if n in showalways:
933 if opts['show_stage'] or n != 'parsed':
939 if opts['show_stage'] or n != 'parsed':
934 ui.write(("* %s:\n") % n)
940 ui.write(("* %s:\n") % n)
935 ui.write(filesetlang.prettyformat(tree), "\n")
941 ui.write(filesetlang.prettyformat(tree), "\n")
936
942
937 files = set()
943 files = set()
938 if opts['all_files']:
944 if opts['all_files']:
939 for r in repo:
945 for r in repo:
940 c = repo[r]
946 c = repo[r]
941 files.update(c.files())
947 files.update(c.files())
942 files.update(c.substate)
948 files.update(c.substate)
943 if opts['all_files'] or ctx.rev() is None:
949 if opts['all_files'] or ctx.rev() is None:
944 wctx = repo[None]
950 wctx = repo[None]
945 files.update(repo.dirstate.walk(scmutil.matchall(repo),
951 files.update(repo.dirstate.walk(scmutil.matchall(repo),
946 subrepos=list(wctx.substate),
952 subrepos=list(wctx.substate),
947 unknown=True, ignored=True))
953 unknown=True, ignored=True))
948 files.update(wctx.substate)
954 files.update(wctx.substate)
949 else:
955 else:
950 files.update(ctx.files())
956 files.update(ctx.files())
951 files.update(ctx.substate)
957 files.update(ctx.substate)
952
958
953 m = ctx.matchfileset(expr)
959 m = ctx.matchfileset(expr)
954 if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
960 if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
955 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
961 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
956 for f in sorted(files):
962 for f in sorted(files):
957 if not m(f):
963 if not m(f):
958 continue
964 continue
959 ui.write("%s\n" % f)
965 ui.write("%s\n" % f)
960
966
961 @command('debugformat',
967 @command('debugformat',
962 [] + cmdutil.formatteropts)
968 [] + cmdutil.formatteropts)
963 def debugformat(ui, repo, **opts):
969 def debugformat(ui, repo, **opts):
964 """display format information about the current repository
970 """display format information about the current repository
965
971
966 Use --verbose to get extra information about the current config value and
972 Use --verbose to get extra information about the current config value and
967 the Mercurial default."""
973 the Mercurial default."""
968 opts = pycompat.byteskwargs(opts)
974 opts = pycompat.byteskwargs(opts)
969 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
975 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
970 maxvariantlength = max(len('format-variant'), maxvariantlength)
976 maxvariantlength = max(len('format-variant'), maxvariantlength)
971
977
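# Build a '%s:' pattern padded so all format-variant names align in one column.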
972 def makeformatname(name):
978 def makeformatname(name):
973 return '%s:' + (' ' * (maxvariantlength - len(name)))
979 return '%s:' + (' ' * (maxvariantlength - len(name)))
974
980
975 fm = ui.formatter('debugformat', opts)
981 fm = ui.formatter('debugformat', opts)
976 if fm.isplain():
982 if fm.isplain():
977 def formatvalue(value):
983 def formatvalue(value):
978 if util.safehasattr(value, 'startswith'):
984 if util.safehasattr(value, 'startswith'):
979 return value
985 return value
980 if value:
986 if value:
981 return 'yes'
987 return 'yes'
982 else:
988 else:
983 return 'no'
989 return 'no'
984 else:
990 else:
985 formatvalue = pycompat.identity
991 formatvalue = pycompat.identity
986
992
987 fm.plain('format-variant')
993 fm.plain('format-variant')
988 fm.plain(' ' * (maxvariantlength - len('format-variant')))
994 fm.plain(' ' * (maxvariantlength - len('format-variant')))
989 fm.plain(' repo')
995 fm.plain(' repo')
990 if ui.verbose:
996 if ui.verbose:
991 fm.plain(' config default')
997 fm.plain(' config default')
992 fm.plain('\n')
998 fm.plain('\n')
993 for fv in upgrade.allformatvariant:
999 for fv in upgrade.allformatvariant:
994 fm.startitem()
1000 fm.startitem()
995 repovalue = fv.fromrepo(repo)
1001 repovalue = fv.fromrepo(repo)
996 configvalue = fv.fromconfig(repo)
1002 configvalue = fv.fromconfig(repo)
997
1003
998 if repovalue != configvalue:
1004 if repovalue != configvalue:
999 namelabel = 'formatvariant.name.mismatchconfig'
1005 namelabel = 'formatvariant.name.mismatchconfig'
1000 repolabel = 'formatvariant.repo.mismatchconfig'
1006 repolabel = 'formatvariant.repo.mismatchconfig'
1001 elif repovalue != fv.default:
1007 elif repovalue != fv.default:
1002 namelabel = 'formatvariant.name.mismatchdefault'
1008 namelabel = 'formatvariant.name.mismatchdefault'
1003 repolabel = 'formatvariant.repo.mismatchdefault'
1009 repolabel = 'formatvariant.repo.mismatchdefault'
1004 else:
1010 else:
1005 namelabel = 'formatvariant.name.uptodate'
1011 namelabel = 'formatvariant.name.uptodate'
1006 repolabel = 'formatvariant.repo.uptodate'
1012 repolabel = 'formatvariant.repo.uptodate'
1007
1013
1008 fm.write('name', makeformatname(fv.name), fv.name,
1014 fm.write('name', makeformatname(fv.name), fv.name,
1009 label=namelabel)
1015 label=namelabel)
1010 fm.write('repo', ' %3s', formatvalue(repovalue),
1016 fm.write('repo', ' %3s', formatvalue(repovalue),
1011 label=repolabel)
1017 label=repolabel)
1012 if fv.default != configvalue:
1018 if fv.default != configvalue:
1013 configlabel = 'formatvariant.config.special'
1019 configlabel = 'formatvariant.config.special'
1014 else:
1020 else:
1015 configlabel = 'formatvariant.config.default'
1021 configlabel = 'formatvariant.config.default'
1016 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
1022 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
1017 label=configlabel)
1023 label=configlabel)
1018 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
1024 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
1019 label='formatvariant.default')
1025 label='formatvariant.default')
1020 fm.plain('\n')
1026 fm.plain('\n')
1021 fm.end()
1027 fm.end()
1022
1028
1023 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
1029 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
1024 def debugfsinfo(ui, path="."):
1030 def debugfsinfo(ui, path="."):
1025 """show information detected about current filesystem"""
1031 """show information detected about current filesystem"""
1026 ui.write(('path: %s\n') % path)
1032 ui.write(('path: %s\n') % path)
1027 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
1033 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
1028 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
1034 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
1029 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
1035 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
1030 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
1036 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
1031 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
1037 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
1032 casesensitive = '(unknown)'
1038 casesensitive = '(unknown)'
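# Probe case sensitivity with a throwaway temp file; keep '(unknown)' if the
# probe cannot be performed (e.g. the path is not writable).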
1033 try:
1039 try:
1034 with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
1040 with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
1035 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
1041 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
1036 except OSError:
1042 except OSError:
1037 pass
1043 pass
1038 ui.write(('case-sensitive: %s\n') % casesensitive)
1044 ui.write(('case-sensitive: %s\n') % casesensitive)
1039
1045
1040 @command('debuggetbundle',
1046 @command('debuggetbundle',
1041 [('H', 'head', [], _('id of head node'), _('ID')),
1047 [('H', 'head', [], _('id of head node'), _('ID')),
1042 ('C', 'common', [], _('id of common node'), _('ID')),
1048 ('C', 'common', [], _('id of common node'), _('ID')),
1043 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
1049 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
1044 _('REPO FILE [-H|-C ID]...'),
1050 _('REPO FILE [-H|-C ID]...'),
1045 norepo=True)
1051 norepo=True)
1046 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1052 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1047 """retrieves a bundle from a repo
1053 """retrieves a bundle from a repo
1048
1054
1049 Every ID must be a full-length hex node id string. Saves the bundle to the
1055 Every ID must be a full-length hex node id string. Saves the bundle to the
1050 given file.
1056 given file.
1051 """
1057 """
1052 opts = pycompat.byteskwargs(opts)
1058 opts = pycompat.byteskwargs(opts)
1053 repo = hg.peer(ui, opts, repopath)
1059 repo = hg.peer(ui, opts, repopath)
1054 if not repo.capable('getbundle'):
1060 if not repo.capable('getbundle'):
1055 raise error.Abort("getbundle() not supported by target repository")
1061 raise error.Abort("getbundle() not supported by target repository")
1056 args = {}
1062 args = {}
1057 if common:
1063 if common:
1058 args[r'common'] = [bin(s) for s in common]
1064 args[r'common'] = [bin(s) for s in common]
1059 if head:
1065 if head:
1060 args[r'heads'] = [bin(s) for s in head]
1066 args[r'heads'] = [bin(s) for s in head]
1061 # TODO: get desired bundlecaps from command line.
1067 # TODO: get desired bundlecaps from command line.
1062 args[r'bundlecaps'] = None
1068 args[r'bundlecaps'] = None
1063 bundle = repo.getbundle('debug', **args)
1069 bundle = repo.getbundle('debug', **args)
1064
1070
1065 bundletype = opts.get('type', 'bzip2').lower()
1071 bundletype = opts.get('type', 'bzip2').lower()
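# Map the user-facing --type names onto internal bundle format identifiers.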
1066 btypes = {'none': 'HG10UN',
1072 btypes = {'none': 'HG10UN',
1067 'bzip2': 'HG10BZ',
1073 'bzip2': 'HG10BZ',
1068 'gzip': 'HG10GZ',
1074 'gzip': 'HG10GZ',
1069 'bundle2': 'HG20'}
1075 'bundle2': 'HG20'}
1070 bundletype = btypes.get(bundletype)
1076 bundletype = btypes.get(bundletype)
1071 if bundletype not in bundle2.bundletypes:
1077 if bundletype not in bundle2.bundletypes:
1072 raise error.Abort(_('unknown bundle type specified with --type'))
1078 raise error.Abort(_('unknown bundle type specified with --type'))
1073 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1079 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1074
1080
1075 @command('debugignore', [], '[FILE]')
1081 @command('debugignore', [], '[FILE]')
1076 def debugignore(ui, repo, *files, **opts):
1082 def debugignore(ui, repo, *files, **opts):
1077 """display the combined ignore pattern and information about ignored files
1083 """display the combined ignore pattern and information about ignored files
1078
1084
1079 With no argument display the combined ignore pattern.
1085 With no argument display the combined ignore pattern.
1080
1086
1081 Given space separated file names, show if the given file is ignored and
1087 Given space separated file names, show if the given file is ignored and
1082 if so, show the ignore rule (file and line number) that matched it.
1088 if so, show the ignore rule (file and line number) that matched it.
1083 """
1089 """
1084 ignore = repo.dirstate._ignore
1090 ignore = repo.dirstate._ignore
1085 if not files:
1091 if not files:
1086 # Show all the patterns
1092 # Show all the patterns
1087 ui.write("%s\n" % pycompat.byterepr(ignore))
1093 ui.write("%s\n" % pycompat.byterepr(ignore))
1088 else:
1094 else:
1089 m = scmutil.match(repo[None], pats=files)
1095 m = scmutil.match(repo[None], pats=files)
1090 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1096 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1091 for f in m.files():
1097 for f in m.files():
1092 nf = util.normpath(f)
1098 nf = util.normpath(f)
1093 ignored = None
1099 ignored = None
1094 ignoredata = None
1100 ignoredata = None
1095 if nf != '.':
1101 if nf != '.':
1096 if ignore(nf):
1102 if ignore(nf):
1097 ignored = nf
1103 ignored = nf
1098 ignoredata = repo.dirstate._ignorefileandline(nf)
1104 ignoredata = repo.dirstate._ignorefileandline(nf)
1099 else:
1105 else:
1100 for p in util.finddirs(nf):
1106 for p in util.finddirs(nf):
1101 if ignore(p):
1107 if ignore(p):
1102 ignored = p
1108 ignored = p
1103 ignoredata = repo.dirstate._ignorefileandline(p)
1109 ignoredata = repo.dirstate._ignorefileandline(p)
1104 break
1110 break
1105 if ignored:
1111 if ignored:
1106 if ignored == nf:
1112 if ignored == nf:
1107 ui.write(_("%s is ignored\n") % uipathfn(f))
1113 ui.write(_("%s is ignored\n") % uipathfn(f))
1108 else:
1114 else:
1109 ui.write(_("%s is ignored because of "
1115 ui.write(_("%s is ignored because of "
1110 "containing folder %s\n")
1116 "containing folder %s\n")
1111 % (uipathfn(f), ignored))
1117 % (uipathfn(f), ignored))
1112 ignorefile, lineno, line = ignoredata
1118 ignorefile, lineno, line = ignoredata
1113 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1119 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1114 % (ignorefile, lineno, line))
1120 % (ignorefile, lineno, line))
1115 else:
1121 else:
1116 ui.write(_("%s is not ignored\n") % uipathfn(f))
1122 ui.write(_("%s is not ignored\n") % uipathfn(f))
1117
1123
1118 @command('debugindex', cmdutil.debugrevlogopts + cmdutil.formatteropts,
1124 @command('debugindex', cmdutil.debugrevlogopts + cmdutil.formatteropts,
1119 _('-c|-m|FILE'))
1125 _('-c|-m|FILE'))
1120 def debugindex(ui, repo, file_=None, **opts):
1126 def debugindex(ui, repo, file_=None, **opts):
1121 """dump index data for a storage primitive"""
1127 """dump index data for a storage primitive"""
1122 opts = pycompat.byteskwargs(opts)
1128 opts = pycompat.byteskwargs(opts)
1123 store = cmdutil.openstorage(repo, 'debugindex', file_, opts)
1129 store = cmdutil.openstorage(repo, 'debugindex', file_, opts)
1124
1130
1125 if ui.debugflag:
1131 if ui.debugflag:
1126 shortfn = hex
1132 shortfn = hex
1127 else:
1133 else:
1128 shortfn = short
1134 shortfn = short
1129
1135
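# Peek at the first revision to size the node id column: 12 characters for
# short hashes, 40 when --debug selects full hashes.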
1130 idlen = 12
1136 idlen = 12
1131 for i in store:
1137 for i in store:
1132 idlen = len(shortfn(store.node(i)))
1138 idlen = len(shortfn(store.node(i)))
1133 break
1139 break
1134
1140
1135 fm = ui.formatter('debugindex', opts)
1141 fm = ui.formatter('debugindex', opts)
1136 fm.plain(b' rev linkrev %s %s p2\n' % (
1142 fm.plain(b' rev linkrev %s %s p2\n' % (
1137 b'nodeid'.ljust(idlen),
1143 b'nodeid'.ljust(idlen),
1138 b'p1'.ljust(idlen)))
1144 b'p1'.ljust(idlen)))
1139
1145
1140 for rev in store:
1146 for rev in store:
1141 node = store.node(rev)
1147 node = store.node(rev)
1142 parents = store.parents(node)
1148 parents = store.parents(node)
1143
1149
1144 fm.startitem()
1150 fm.startitem()
1145 fm.write(b'rev', b'%6d ', rev)
1151 fm.write(b'rev', b'%6d ', rev)
1146 fm.write(b'linkrev', '%7d ', store.linkrev(rev))
1152 fm.write(b'linkrev', '%7d ', store.linkrev(rev))
1147 fm.write(b'node', '%s ', shortfn(node))
1153 fm.write(b'node', '%s ', shortfn(node))
1148 fm.write(b'p1', '%s ', shortfn(parents[0]))
1154 fm.write(b'p1', '%s ', shortfn(parents[0]))
1149 fm.write(b'p2', '%s', shortfn(parents[1]))
1155 fm.write(b'p2', '%s', shortfn(parents[1]))
1150 fm.plain(b'\n')
1156 fm.plain(b'\n')
1151
1157
1152 fm.end()
1158 fm.end()
1153
1159
1154 @command('debugindexdot', cmdutil.debugrevlogopts,
1160 @command('debugindexdot', cmdutil.debugrevlogopts,
1155 _('-c|-m|FILE'), optionalrepo=True)
1161 _('-c|-m|FILE'), optionalrepo=True)
1156 def debugindexdot(ui, repo, file_=None, **opts):
1162 def debugindexdot(ui, repo, file_=None, **opts):
1157 """dump an index DAG as a graphviz dot file"""
1163 """dump an index DAG as a graphviz dot file"""
1158 opts = pycompat.byteskwargs(opts)
1164 opts = pycompat.byteskwargs(opts)
1159 r = cmdutil.openstorage(repo, 'debugindexdot', file_, opts)
1165 r = cmdutil.openstorage(repo, 'debugindexdot', file_, opts)
1160 ui.write(("digraph G {\n"))
1166 ui.write(("digraph G {\n"))
1161 for i in r:
1167 for i in r:
1162 node = r.node(i)
1168 node = r.node(i)
1163 pp = r.parents(node)
1169 pp = r.parents(node)
1164 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1170 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1165 if pp[1] != nullid:
1171 if pp[1] != nullid:
1166 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1172 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1167 ui.write("}\n")
1173 ui.write("}\n")
1168
1174
1169 @command('debugindexstats', [])
1175 @command('debugindexstats', [])
1170 def debugindexstats(ui, repo):
1176 def debugindexstats(ui, repo):
1171 """show stats related to the changelog index"""
1177 """show stats related to the changelog index"""
1172 repo.changelog.shortest(nullid, 1)
1178 repo.changelog.shortest(nullid, 1)
1173 index = repo.changelog.index
1179 index = repo.changelog.index
1174 if not util.safehasattr(index, 'stats'):
1180 if not util.safehasattr(index, 'stats'):
1175 raise error.Abort(_('debugindexstats only works with native code'))
1181 raise error.Abort(_('debugindexstats only works with native code'))
1176 for k, v in sorted(index.stats().items()):
1182 for k, v in sorted(index.stats().items()):
1177 ui.write('%s: %d\n' % (k, v))
1183 ui.write('%s: %d\n' % (k, v))
1178
1184
1179 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1185 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1180 def debuginstall(ui, **opts):
1186 def debuginstall(ui, **opts):
1181 '''test Mercurial installation
1187 '''test Mercurial installation
1182
1188
1183 Returns 0 on success.
1189 Returns 0 on success.
1184 '''
1190 '''
1185 opts = pycompat.byteskwargs(opts)
1191 opts = pycompat.byteskwargs(opts)
1186
1192
1187 problems = 0
1193 problems = 0
1188
1194
1189 fm = ui.formatter('debuginstall', opts)
1195 fm = ui.formatter('debuginstall', opts)
1190 fm.startitem()
1196 fm.startitem()
1191
1197
1192 # encoding
1198 # encoding
1193 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1199 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1194 err = None
1200 err = None
1195 try:
1201 try:
1196 codecs.lookup(pycompat.sysstr(encoding.encoding))
1202 codecs.lookup(pycompat.sysstr(encoding.encoding))
1197 except LookupError as inst:
1203 except LookupError as inst:
1198 err = stringutil.forcebytestr(inst)
1204 err = stringutil.forcebytestr(inst)
1199 problems += 1
1205 problems += 1
1200 fm.condwrite(err, 'encodingerror', _(" %s\n"
1206 fm.condwrite(err, 'encodingerror', _(" %s\n"
1201 " (check that your locale is properly set)\n"), err)
1207 " (check that your locale is properly set)\n"), err)
1202
1208
1203 # Python
1209 # Python
1204 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1210 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1205 pycompat.sysexecutable)
1211 pycompat.sysexecutable)
1206 fm.write('pythonver', _("checking Python version (%s)\n"),
1212 fm.write('pythonver', _("checking Python version (%s)\n"),
1207 ("%d.%d.%d" % sys.version_info[:3]))
1213 ("%d.%d.%d" % sys.version_info[:3]))
1208 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1214 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1209 os.path.dirname(pycompat.fsencode(os.__file__)))
1215 os.path.dirname(pycompat.fsencode(os.__file__)))
1210
1216
1211 security = set(sslutil.supportedprotocols)
1217 security = set(sslutil.supportedprotocols)
1212 if sslutil.hassni:
1218 if sslutil.hassni:
1213 security.add('sni')
1219 security.add('sni')
1214
1220
1215 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1221 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1216 fm.formatlist(sorted(security), name='protocol',
1222 fm.formatlist(sorted(security), name='protocol',
1217 fmt='%s', sep=','))
1223 fmt='%s', sep=','))
1218
1224
1219 # These are warnings, not errors. So don't increment problem count. This
1225 # These are warnings, not errors. So don't increment problem count. This
1220 # may change in the future.
1226 # may change in the future.
1221 if 'tls1.2' not in security:
1227 if 'tls1.2' not in security:
1222 fm.plain(_(' TLS 1.2 not supported by Python install; '
1228 fm.plain(_(' TLS 1.2 not supported by Python install; '
1223 'network connections lack modern security\n'))
1229 'network connections lack modern security\n'))
1224 if 'sni' not in security:
1230 if 'sni' not in security:
1225 fm.plain(_(' SNI not supported by Python install; may have '
1231 fm.plain(_(' SNI not supported by Python install; may have '
1226 'connectivity issues with some servers\n'))
1232 'connectivity issues with some servers\n'))
1227
1233
1228 # TODO print CA cert info
1234 # TODO print CA cert info
1229
1235
1230 # hg version
1236 # hg version
1231 hgver = util.version()
1237 hgver = util.version()
1232 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1238 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1233 hgver.split('+')[0])
1239 hgver.split('+')[0])
1234 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1240 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1235 '+'.join(hgver.split('+')[1:]))
1241 '+'.join(hgver.split('+')[1:]))
1236
1242
1237 # compiled modules
1243 # compiled modules
1238 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1244 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1239 policy.policy)
1245 policy.policy)
1240 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1246 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1241 os.path.dirname(pycompat.fsencode(__file__)))
1247 os.path.dirname(pycompat.fsencode(__file__)))
1242
1248
1243 if policy.policy in ('c', 'allow'):
1249 if policy.policy in ('c', 'allow'):
1244 err = None
1250 err = None
1245 try:
1251 try:
1246 from .cext import (
1252 from .cext import (
1247 base85,
1253 base85,
1248 bdiff,
1254 bdiff,
1249 mpatch,
1255 mpatch,
1250 osutil,
1256 osutil,
1251 )
1257 )
1252 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1258 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1253 except Exception as inst:
1259 except Exception as inst:
1254 err = stringutil.forcebytestr(inst)
1260 err = stringutil.forcebytestr(inst)
1255 problems += 1
1261 problems += 1
1256 fm.condwrite(err, 'extensionserror', " %s\n", err)
1262 fm.condwrite(err, 'extensionserror', " %s\n", err)
1257
1263
1258 compengines = util.compengines._engines.values()
1264 compengines = util.compengines._engines.values()
1259 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1265 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1260 fm.formatlist(sorted(e.name() for e in compengines),
1266 fm.formatlist(sorted(e.name() for e in compengines),
1261 name='compengine', fmt='%s', sep=', '))
1267 name='compengine', fmt='%s', sep=', '))
1262 fm.write('compenginesavail', _('checking available compression engines '
1268 fm.write('compenginesavail', _('checking available compression engines '
1263 '(%s)\n'),
1269 '(%s)\n'),
1264 fm.formatlist(sorted(e.name() for e in compengines
1270 fm.formatlist(sorted(e.name() for e in compengines
1265 if e.available()),
1271 if e.available()),
1266 name='compengine', fmt='%s', sep=', '))
1272 name='compengine', fmt='%s', sep=', '))
1267 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1273 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1268 fm.write('compenginesserver', _('checking available compression engines '
1274 fm.write('compenginesserver', _('checking available compression engines '
1269 'for wire protocol (%s)\n'),
1275 'for wire protocol (%s)\n'),
1270 fm.formatlist([e.name() for e in wirecompengines
1276 fm.formatlist([e.name() for e in wirecompengines
1271 if e.wireprotosupport()],
1277 if e.wireprotosupport()],
1272 name='compengine', fmt='%s', sep=', '))
1278 name='compengine', fmt='%s', sep=', '))
1273 re2 = 'missing'
1279 re2 = 'missing'
1274 if util._re2:
1280 if util._re2:
1275 re2 = 'available'
1281 re2 = 'available'
1276 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1282 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1277 fm.data(re2=bool(util._re2))
1283 fm.data(re2=bool(util._re2))
1278
1284
1279 # templates
1285 # templates
1280 p = templater.templatepaths()
1286 p = templater.templatepaths()
1281 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1287 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1282 fm.condwrite(not p, '', _(" no template directories found\n"))
1288 fm.condwrite(not p, '', _(" no template directories found\n"))
1283 if p:
1289 if p:
1284 m = templater.templatepath("map-cmdline.default")
1290 m = templater.templatepath("map-cmdline.default")
1285 if m:
1291 if m:
1286 # template found, check if it is working
1292 # template found, check if it is working
1287 err = None
1293 err = None
1288 try:
1294 try:
1289 templater.templater.frommapfile(m)
1295 templater.templater.frommapfile(m)
1290 except Exception as inst:
1296 except Exception as inst:
1291 err = stringutil.forcebytestr(inst)
1297 err = stringutil.forcebytestr(inst)
1292 p = None
1298 p = None
1293 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1299 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1294 else:
1300 else:
1295 p = None
1301 p = None
1296 fm.condwrite(p, 'defaulttemplate',
1302 fm.condwrite(p, 'defaulttemplate',
1297 _("checking default template (%s)\n"), m)
1303 _("checking default template (%s)\n"), m)
1298 fm.condwrite(not m, 'defaulttemplatenotfound',
1304 fm.condwrite(not m, 'defaulttemplatenotfound',
1299 _(" template '%s' not found\n"), "default")
1305 _(" template '%s' not found\n"), "default")
1300 if not p:
1306 if not p:
1301 problems += 1
1307 problems += 1
1302 fm.condwrite(not p, '',
1308 fm.condwrite(not p, '',
1303 _(" (templates seem to have been installed incorrectly)\n"))
1309 _(" (templates seem to have been installed incorrectly)\n"))
1304
1310
1305 # editor
1311 # editor
1306 editor = ui.geteditor()
1312 editor = ui.geteditor()
1307 editor = util.expandpath(editor)
1313 editor = util.expandpath(editor)
1308 editorbin = procutil.shellsplit(editor)[0]
1314 editorbin = procutil.shellsplit(editor)[0]
1309 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1315 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1310 cmdpath = procutil.findexe(editorbin)
1316 cmdpath = procutil.findexe(editorbin)
1311 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1317 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1312 _(" No commit editor set and can't find %s in PATH\n"
1318 _(" No commit editor set and can't find %s in PATH\n"
1313 " (specify a commit editor in your configuration"
1319 " (specify a commit editor in your configuration"
1314 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1320 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1315 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1321 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1316 _(" Can't find editor '%s' in PATH\n"
1322 _(" Can't find editor '%s' in PATH\n"
1317 " (specify a commit editor in your configuration"
1323 " (specify a commit editor in your configuration"
1318 " file)\n"), not cmdpath and editorbin)
1324 " file)\n"), not cmdpath and editorbin)
1319 if not cmdpath and editor != 'vi':
1325 if not cmdpath and editor != 'vi':
1320 problems += 1
1326 problems += 1
1321
1327
1322 # check username
1328 # check username
1323 username = None
1329 username = None
1324 err = None
1330 err = None
1325 try:
1331 try:
1326 username = ui.username()
1332 username = ui.username()
1327 except error.Abort as e:
1333 except error.Abort as e:
1328 err = stringutil.forcebytestr(e)
1334 err = stringutil.forcebytestr(e)
1329 problems += 1
1335 problems += 1
1330
1336
1331 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1337 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1332 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1338 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1333 " (specify a username in your configuration file)\n"), err)
1339 " (specify a username in your configuration file)\n"), err)
1334
1340
1335 fm.condwrite(not problems, '',
1341 fm.condwrite(not problems, '',
1336 _("no problems detected\n"))
1342 _("no problems detected\n"))
1337 if not problems:
1343 if not problems:
1338 fm.data(problems=problems)
1344 fm.data(problems=problems)
1339 fm.condwrite(problems, 'problems',
1345 fm.condwrite(problems, 'problems',
1340 _("%d problems detected,"
1346 _("%d problems detected,"
1341 " please check your install!\n"), problems)
1347 " please check your install!\n"), problems)
1342 fm.end()
1348 fm.end()
1343
1349
1344 return problems
1350 return problems
1345
1351
1346 @command('debugknown', [], _('REPO ID...'), norepo=True)
1352 @command('debugknown', [], _('REPO ID...'), norepo=True)
1347 def debugknown(ui, repopath, *ids, **opts):
1353 def debugknown(ui, repopath, *ids, **opts):
1348 """test whether node ids are known to a repo
1354 """test whether node ids are known to a repo
1349
1355
1350 Every ID must be a full-length hex node id string. Returns a list of 0s
1356 Every ID must be a full-length hex node id string. Returns a list of 0s
1351 and 1s indicating unknown/known.
1357 and 1s indicating unknown/known.
1352 """
1358 """
1353 opts = pycompat.byteskwargs(opts)
1359 opts = pycompat.byteskwargs(opts)
1354 repo = hg.peer(ui, opts, repopath)
1360 repo = hg.peer(ui, opts, repopath)
1355 if not repo.capable('known'):
1361 if not repo.capable('known'):
1356 raise error.Abort("known() not supported by target repository")
1362 raise error.Abort("known() not supported by target repository")
1357 flags = repo.known([bin(s) for s in ids])
1363 flags = repo.known([bin(s) for s in ids])
1358 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1364 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1359
1365
1360 @command('debuglabelcomplete', [], _('LABEL...'))
1366 @command('debuglabelcomplete', [], _('LABEL...'))
1361 def debuglabelcomplete(ui, repo, *args):
1367 def debuglabelcomplete(ui, repo, *args):
1362 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1368 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1363 debugnamecomplete(ui, repo, *args)
1369 debugnamecomplete(ui, repo, *args)
1364
1370
1365 @command('debuglocks',
1371 @command('debuglocks',
1366 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1372 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1367 ('W', 'force-wlock', None,
1373 ('W', 'force-wlock', None,
1368 _('free the working state lock (DANGEROUS)')),
1374 _('free the working state lock (DANGEROUS)')),
1369 ('s', 'set-lock', None, _('set the store lock until stopped')),
1375 ('s', 'set-lock', None, _('set the store lock until stopped')),
1370 ('S', 'set-wlock', None,
1376 ('S', 'set-wlock', None,
1371 _('set the working state lock until stopped'))],
1377 _('set the working state lock until stopped'))],
1372 _('[OPTION]...'))
1378 _('[OPTION]...'))
1373 def debuglocks(ui, repo, **opts):
1379 def debuglocks(ui, repo, **opts):
1374 """show or modify state of locks
1380 """show or modify state of locks
1375
1381
1376 By default, this command will show which locks are held. This
1382 By default, this command will show which locks are held. This
1377 includes the user and process holding the lock, the amount of time
1383 includes the user and process holding the lock, the amount of time
1378 the lock has been held, and the machine name where the process is
1384 the lock has been held, and the machine name where the process is
1379 running if it's not local.
1385 running if it's not local.
1380
1386
1381 Locks protect the integrity of Mercurial's data, so they should be
1387 Locks protect the integrity of Mercurial's data, so they should be
1382 treated with care. System crashes or other interruptions may cause
1388 treated with care. System crashes or other interruptions may cause
1383 locks to not be properly released, though Mercurial will usually
1389 locks to not be properly released, though Mercurial will usually
1384 detect and remove such stale locks automatically.
1390 detect and remove such stale locks automatically.
1385
1391
1386 However, detecting stale locks may not always be possible (for
1392 However, detecting stale locks may not always be possible (for
1387 instance, on a shared filesystem). Removing locks may also be
1393 instance, on a shared filesystem). Removing locks may also be
1388 blocked by filesystem permissions.
1394 blocked by filesystem permissions.
1389
1395
1390 Setting a lock will prevent other commands from changing the data.
1396 Setting a lock will prevent other commands from changing the data.
1391 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1397 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1392 The set locks are removed when the command exits.
1398 The set locks are removed when the command exits.
1393
1399
1394 Returns 0 if no locks are held.
1400 Returns 0 if no locks are held.
1395
1401
1396 """
1402 """
1397
1403
1398 if opts.get(r'force_lock'):
1404 if opts.get(r'force_lock'):
1399 repo.svfs.unlink('lock')
1405 repo.svfs.unlink('lock')
1400 if opts.get(r'force_wlock'):
1406 if opts.get(r'force_wlock'):
1401 repo.vfs.unlink('wlock')
1407 repo.vfs.unlink('wlock')
1402 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1408 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1403 return 0
1409 return 0
1404
1410
1405 locks = []
1411 locks = []
1406 try:
1412 try:
1407 if opts.get(r'set_wlock'):
1413 if opts.get(r'set_wlock'):
1408 try:
1414 try:
1409 locks.append(repo.wlock(False))
1415 locks.append(repo.wlock(False))
1410 except error.LockHeld:
1416 except error.LockHeld:
1411 raise error.Abort(_('wlock is already held'))
1417 raise error.Abort(_('wlock is already held'))
1412 if opts.get(r'set_lock'):
1418 if opts.get(r'set_lock'):
1413 try:
1419 try:
1414 locks.append(repo.lock(False))
1420 locks.append(repo.lock(False))
1415 except error.LockHeld:
1421 except error.LockHeld:
1416 raise error.Abort(_('lock is already held'))
1422 raise error.Abort(_('lock is already held'))
1417 if len(locks):
1423 if len(locks):
1418 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1424 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1419 return 0
1425 return 0
1420 finally:
1426 finally:
1421 release(*locks)
1427 release(*locks)
1422
1428
1423 now = time.time()
1429 now = time.time()
1424 held = 0
1430 held = 0
1425
1431
1426 def report(vfs, name, method):
1432 def report(vfs, name, method):
1427 # this causes stale locks to get reaped for more accurate reporting
1433 # this causes stale locks to get reaped for more accurate reporting
1428 try:
1434 try:
1429 l = method(False)
1435 l = method(False)
1430 except error.LockHeld:
1436 except error.LockHeld:
1431 l = None
1437 l = None
1432
1438
1433 if l:
1439 if l:
1434 l.release()
1440 l.release()
1435 else:
1441 else:
1436 try:
1442 try:
1437 st = vfs.lstat(name)
1443 st = vfs.lstat(name)
1438 age = now - st[stat.ST_MTIME]
1444 age = now - st[stat.ST_MTIME]
1439 user = util.username(st.st_uid)
1445 user = util.username(st.st_uid)
1440 locker = vfs.readlock(name)
1446 locker = vfs.readlock(name)
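# Lock contents look like "host:pid"; drop the host part when the lock was
# taken on this machine.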
1441 if ":" in locker:
1447 if ":" in locker:
1442 host, pid = locker.split(':')
1448 host, pid = locker.split(':')
1443 if host == socket.gethostname():
1449 if host == socket.gethostname():
1444 locker = 'user %s, process %s' % (user or b'None', pid)
1450 locker = 'user %s, process %s' % (user or b'None', pid)
1445 else:
1451 else:
1446 locker = ('user %s, process %s, host %s'
1452 locker = ('user %s, process %s, host %s'
1447 % (user or b'None', pid, host))
1453 % (user or b'None', pid, host))
1448 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1454 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1449 return 1
1455 return 1
1450 except OSError as e:
1456 except OSError as e:
1451 if e.errno != errno.ENOENT:
1457 if e.errno != errno.ENOENT:
1452 raise
1458 raise
1453
1459
1454 ui.write(("%-6s free\n") % (name + ":"))
1460 ui.write(("%-6s free\n") % (name + ":"))
1455 return 0
1461 return 0
1456
1462
1457 held += report(repo.svfs, "lock", repo.lock)
1463 held += report(repo.svfs, "lock", repo.lock)
1458 held += report(repo.vfs, "wlock", repo.wlock)
1464 held += report(repo.vfs, "wlock", repo.wlock)
1459
1465
1460 return held
1466 return held
1461
1467
1462 @command('debugmanifestfulltextcache', [
1468 @command('debugmanifestfulltextcache', [
1463 ('', 'clear', False, _('clear the cache')),
1469 ('', 'clear', False, _('clear the cache')),
1464 ('a', 'add', [], _('add the given manifest nodes to the cache'),
1470 ('a', 'add', [], _('add the given manifest nodes to the cache'),
1465 _('NODE'))
1471 _('NODE'))
1466 ], '')
1472 ], '')
1467 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
1473 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
1468 """show, clear or amend the contents of the manifest fulltext cache"""
1474 """show, clear or amend the contents of the manifest fulltext cache"""
1469
1475
1470 def getcache():
1476 def getcache():
1471 r = repo.manifestlog.getstorage(b'')
1477 r = repo.manifestlog.getstorage(b'')
1472 try:
1478 try:
1473 return r._fulltextcache
1479 return r._fulltextcache
1474 except AttributeError:
1480 except AttributeError:
1475 msg = _("Current revlog implementation doesn't appear to have a "
1481 msg = _("Current revlog implementation doesn't appear to have a "
1476 "manifest fulltext cache\n")
1482 "manifest fulltext cache\n")
1477 raise error.Abort(msg)
1483 raise error.Abort(msg)
1478
1484
1479 if opts.get(r'clear'):
1485 if opts.get(r'clear'):
1480 with repo.wlock():
1486 with repo.wlock():
1481 cache = getcache()
1487 cache = getcache()
1482 cache.clear(clear_persisted_data=True)
1488 cache.clear(clear_persisted_data=True)
1483 return
1489 return
1484
1490
1485 if add:
1491 if add:
1486 with repo.wlock():
1492 with repo.wlock():
1487 m = repo.manifestlog
1493 m = repo.manifestlog
1488 store = m.getstorage(b'')
1494 store = m.getstorage(b'')
1489 for n in add:
1495 for n in add:
1490 try:
1496 try:
1491 manifest = m[store.lookup(n)]
1497 manifest = m[store.lookup(n)]
1492 except error.LookupError as e:
1498 except error.LookupError as e:
1493 raise error.Abort(e, hint="Check your manifest node id")
1499 raise error.Abort(e, hint="Check your manifest node id")
1494 manifest.read() # stores revision in cache too
1500 manifest.read() # stores revision in cache too
1495 return
1501 return
1496
1502
1497 cache = getcache()
1503 cache = getcache()
1498 if not len(cache):
1504 if not len(cache):
1499 ui.write(_('cache empty\n'))
1505 ui.write(_('cache empty\n'))
1500 else:
1506 else:
1501 ui.write(
1507 ui.write(
1502 _('cache contains %d manifest entries, in order of most to '
1508 _('cache contains %d manifest entries, in order of most to '
1503 'least recent:\n') % (len(cache),))
1509 'least recent:\n') % (len(cache),))
1504 totalsize = 0
1510 totalsize = 0
1505 for nodeid in cache:
1511 for nodeid in cache:
1506 # Use cache.peek to not update the LRU order
1512 # Use cache.peek to not update the LRU order
1507 data = cache.peek(nodeid)
1513 data = cache.peek(nodeid)
1508 size = len(data)
1514 size = len(data)
1509 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1515 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1510 ui.write(_('id: %s, size %s\n') % (
1516 ui.write(_('id: %s, size %s\n') % (
1511 hex(nodeid), util.bytecount(size)))
1517 hex(nodeid), util.bytecount(size)))
1512 ondisk = cache._opener.stat('manifestfulltextcache').st_size
1518 ondisk = cache._opener.stat('manifestfulltextcache').st_size
1513 ui.write(
1519 ui.write(
1514 _('total cache data size %s, on-disk %s\n') % (
1520 _('total cache data size %s, on-disk %s\n') % (
1515 util.bytecount(totalsize), util.bytecount(ondisk))
1521 util.bytecount(totalsize), util.bytecount(ondisk))
1516 )
1522 )
1517
1523
1518 @command('debugmergestate', [], '')
1524 @command('debugmergestate', [], '')
1519 def debugmergestate(ui, repo, *args):
1525 def debugmergestate(ui, repo, *args):
1520 """print merge state
1526 """print merge state
1521
1527
1522 Use --verbose to print out information about whether v1 or v2 merge state
1528 Use --verbose to print out information about whether v1 or v2 merge state
1523 was chosen."""
1529 was chosen."""
1524 def _hashornull(h):
1530 def _hashornull(h):
1525 if h == nullhex:
1531 if h == nullhex:
1526 return 'null'
1532 return 'null'
1527 else:
1533 else:
1528 return h
1534 return h
1529
1535
1530 def printrecords(version):
1536 def printrecords(version):
1531 ui.write(('* version %d records\n') % version)
1537 ui.write(('* version %d records\n') % version)
1532 if version == 1:
1538 if version == 1:
1533 records = v1records
1539 records = v1records
1534 else:
1540 else:
1535 records = v2records
1541 records = v2records
1536
1542
1537 for rtype, record in records:
1543 for rtype, record in records:
1538 # pretty print some record types
1544 # pretty print some record types
1539 if rtype == 'L':
1545 if rtype == 'L':
1540 ui.write(('local: %s\n') % record)
1546 ui.write(('local: %s\n') % record)
1541 elif rtype == 'O':
1547 elif rtype == 'O':
1542 ui.write(('other: %s\n') % record)
1548 ui.write(('other: %s\n') % record)
1543 elif rtype == 'm':
1549 elif rtype == 'm':
1544 driver, mdstate = record.split('\0', 1)
1550 driver, mdstate = record.split('\0', 1)
1545 ui.write(('merge driver: %s (state "%s")\n')
1551 ui.write(('merge driver: %s (state "%s")\n')
1546 % (driver, mdstate))
1552 % (driver, mdstate))
1547 elif rtype in 'FDC':
1553 elif rtype in 'FDC':
1548 r = record.split('\0')
1554 r = record.split('\0')
1549 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1555 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1550 if version == 1:
1556 if version == 1:
1551 onode = 'not stored in v1 format'
1557 onode = 'not stored in v1 format'
1552 flags = r[7]
1558 flags = r[7]
1553 else:
1559 else:
1554 onode, flags = r[7:9]
1560 onode, flags = r[7:9]
1555 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1561 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1556 % (f, rtype, state, _hashornull(hash)))
1562 % (f, rtype, state, _hashornull(hash)))
1557 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1563 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1558 ui.write((' ancestor path: %s (node %s)\n')
1564 ui.write((' ancestor path: %s (node %s)\n')
1559 % (afile, _hashornull(anode)))
1565 % (afile, _hashornull(anode)))
1560 ui.write((' other path: %s (node %s)\n')
1566 ui.write((' other path: %s (node %s)\n')
1561 % (ofile, _hashornull(onode)))
1567 % (ofile, _hashornull(onode)))
1562 elif rtype == 'f':
1568 elif rtype == 'f':
1563 filename, rawextras = record.split('\0', 1)
1569 filename, rawextras = record.split('\0', 1)
1564 extras = rawextras.split('\0')
1570 extras = rawextras.split('\0')
1565 i = 0
1571 i = 0
1566 extrastrings = []
1572 extrastrings = []
1567 while i < len(extras):
1573 while i < len(extras):
1568 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1574 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1569 i += 2
1575 i += 2
1570
1576
1571 ui.write(('file extras: %s (%s)\n')
1577 ui.write(('file extras: %s (%s)\n')
1572 % (filename, ', '.join(extrastrings)))
1578 % (filename, ', '.join(extrastrings)))
1573 elif rtype == 'l':
1579 elif rtype == 'l':
1574 labels = record.split('\0', 2)
1580 labels = record.split('\0', 2)
1575 labels = [l for l in labels if len(l) > 0]
1581 labels = [l for l in labels if len(l) > 0]
1576 ui.write(('labels:\n'))
1582 ui.write(('labels:\n'))
1577 ui.write((' local: %s\n' % labels[0]))
1583 ui.write((' local: %s\n' % labels[0]))
1578 ui.write((' other: %s\n' % labels[1]))
1584 ui.write((' other: %s\n' % labels[1]))
1579 if len(labels) > 2:
1585 if len(labels) > 2:
1580 ui.write((' base: %s\n' % labels[2]))
1586 ui.write((' base: %s\n' % labels[2]))
1581 else:
1587 else:
1582 ui.write(('unrecognized entry: %s\t%s\n')
1588 ui.write(('unrecognized entry: %s\t%s\n')
1583 % (rtype, record.replace('\0', '\t')))
1589 % (rtype, record.replace('\0', '\t')))
1584
1590
1585 # Avoid mergestate.read() since it may raise an exception for unsupported
1591 # Avoid mergestate.read() since it may raise an exception for unsupported
1586 # merge state records. We shouldn't be doing this, but this is OK since this
1592 # merge state records. We shouldn't be doing this, but this is OK since this
1587 # command is pretty low-level.
1593 # command is pretty low-level.
1588 ms = mergemod.mergestate(repo)
1594 ms = mergemod.mergestate(repo)
1589
1595
1590 # sort so that reasonable information is on top
1596 # sort so that reasonable information is on top
1591 v1records = ms._readrecordsv1()
1597 v1records = ms._readrecordsv1()
1592 v2records = ms._readrecordsv2()
1598 v2records = ms._readrecordsv2()
1593 order = 'LOml'
1599 order = 'LOml'
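# Records whose type appears in 'order' sort first, in that order; any other
# record type sorts afterwards, by its payload.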
1594 def key(r):
1600 def key(r):
1595 idx = order.find(r[0])
1601 idx = order.find(r[0])
1596 if idx == -1:
1602 if idx == -1:
1597 return (1, r[1])
1603 return (1, r[1])
1598 else:
1604 else:
1599 return (0, idx)
1605 return (0, idx)
1600 v1records.sort(key=key)
1606 v1records.sort(key=key)
1601 v2records.sort(key=key)
1607 v2records.sort(key=key)
1602
1608
1603 if not v1records and not v2records:
1609 if not v1records and not v2records:
1604 ui.write(('no merge state found\n'))
1610 ui.write(('no merge state found\n'))
1605 elif not v2records:
1611 elif not v2records:
1606 ui.note(('no version 2 merge state\n'))
1612 ui.note(('no version 2 merge state\n'))
1607 printrecords(1)
1613 printrecords(1)
1608 elif ms._v1v2match(v1records, v2records):
1614 elif ms._v1v2match(v1records, v2records):
1609 ui.note(('v1 and v2 states match: using v2\n'))
1615 ui.note(('v1 and v2 states match: using v2\n'))
1610 printrecords(2)
1616 printrecords(2)
1611 else:
1617 else:
1612 ui.note(('v1 and v2 states mismatch: using v1\n'))
1618 ui.note(('v1 and v2 states mismatch: using v1\n'))
1613 printrecords(1)
1619 printrecords(1)
1614 if ui.verbose:
1620 if ui.verbose:
1615 printrecords(2)
1621 printrecords(2)
1616
1622
1617 @command('debugnamecomplete', [], _('NAME...'))
1623 @command('debugnamecomplete', [], _('NAME...'))
1618 def debugnamecomplete(ui, repo, *args):
1624 def debugnamecomplete(ui, repo, *args):
1619 '''complete "names" - tags, open branch names, bookmark names'''
1625 '''complete "names" - tags, open branch names, bookmark names'''
1620
1626
1621 names = set()
1627 names = set()
1622 # since we previously only listed open branches, we will handle that
1628 # since we previously only listed open branches, we will handle that
1623 # specially (after this for loop)
1629 # specially (after this for loop)
1624 for name, ns in repo.names.iteritems():
1630 for name, ns in repo.names.iteritems():
1625 if name != 'branches':
1631 if name != 'branches':
1626 names.update(ns.listnames(repo))
1632 names.update(ns.listnames(repo))
1627 names.update(tag for (tag, heads, tip, closed)
1633 names.update(tag for (tag, heads, tip, closed)
1628 in repo.branchmap().iterbranches() if not closed)
1634 in repo.branchmap().iterbranches() if not closed)
1629 completions = set()
1635 completions = set()
1630 if not args:
1636 if not args:
1631 args = ['']
1637 args = ['']
1632 for a in args:
1638 for a in args:
1633 completions.update(n for n in names if n.startswith(a))
1639 completions.update(n for n in names if n.startswith(a))
1634 ui.write('\n'.join(sorted(completions)))
1640 ui.write('\n'.join(sorted(completions)))
1635 ui.write('\n')
1641 ui.write('\n')
1636
1642
1637 @command('debugobsolete',
1643 @command('debugobsolete',
1638 [('', 'flags', 0, _('markers flag')),
1644 [('', 'flags', 0, _('markers flag')),
1639 ('', 'record-parents', False,
1645 ('', 'record-parents', False,
1640 _('record parent information for the precursor')),
1646 _('record parent information for the precursor')),
1641 ('r', 'rev', [], _('display markers relevant to REV')),
1647 ('r', 'rev', [], _('display markers relevant to REV')),
1642 ('', 'exclusive', False, _('restrict display to markers only '
1648 ('', 'exclusive', False, _('restrict display to markers only '
1643 'relevant to REV')),
1649 'relevant to REV')),
1644 ('', 'index', False, _('display index of the marker')),
1650 ('', 'index', False, _('display index of the marker')),
1645 ('', 'delete', [], _('delete markers specified by indices')),
1651 ('', 'delete', [], _('delete markers specified by indices')),
1646 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1652 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1647 _('[OBSOLETED [REPLACEMENT ...]]'))
1653 _('[OBSOLETED [REPLACEMENT ...]]'))
1648 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1654 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1649 """create arbitrary obsolete marker
1655 """create arbitrary obsolete marker
1650
1656
1651 With no arguments, displays the list of obsolescence markers."""
1657 With no arguments, displays the list of obsolescence markers."""
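# Illustrative invocations (node ids are placeholders):
#   hg debugobsolete                                   # list existing markers
#   hg debugobsolete <precursor-hex> <successor-hex> -d '0 0'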
1652
1658
1653 opts = pycompat.byteskwargs(opts)
1659 opts = pycompat.byteskwargs(opts)
1654
1660
1655 def parsenodeid(s):
1661 def parsenodeid(s):
1656 try:
1662 try:
1657 # We do not use revsingle/revrange functions here to accept
1663 # We do not use revsingle/revrange functions here to accept
1658 # arbitrary node identifiers, possibly not present in the
1664 # arbitrary node identifiers, possibly not present in the
1659 # local repository.
1665 # local repository.
1660 n = bin(s)
1666 n = bin(s)
1661 if len(n) != len(nullid):
1667 if len(n) != len(nullid):
1662 raise TypeError()
1668 raise TypeError()
1663 return n
1669 return n
1664 except TypeError:
1670 except TypeError:
1665 raise error.Abort('changeset references must be full hexadecimal '
1671 raise error.Abort('changeset references must be full hexadecimal '
1666 'node identifiers')
1672 'node identifiers')
1667
1673
1668 if opts.get('delete'):
1674 if opts.get('delete'):
1669 indices = []
1675 indices = []
1670 for v in opts.get('delete'):
1676 for v in opts.get('delete'):
1671 try:
1677 try:
1672 indices.append(int(v))
1678 indices.append(int(v))
1673 except ValueError:
1679 except ValueError:
1674 raise error.Abort(_('invalid index value: %r') % v,
1680 raise error.Abort(_('invalid index value: %r') % v,
1675 hint=_('use integers for indices'))
1681 hint=_('use integers for indices'))
1676
1682
1677 if repo.currenttransaction():
1683 if repo.currenttransaction():
1678 raise error.Abort(_('cannot delete obsmarkers in the middle '
1684 raise error.Abort(_('cannot delete obsmarkers in the middle '
1679 'of a transaction.'))
1685 'of a transaction.'))
1680
1686
1681 with repo.lock():
1687 with repo.lock():
1682 n = repair.deleteobsmarkers(repo.obsstore, indices)
1688 n = repair.deleteobsmarkers(repo.obsstore, indices)
1683 ui.write(_('deleted %i obsolescence markers\n') % n)
1689 ui.write(_('deleted %i obsolescence markers\n') % n)
1684
1690
1685 return
1691 return
1686
1692
1687 if precursor is not None:
1693 if precursor is not None:
1688 if opts['rev']:
1694 if opts['rev']:
1689 raise error.Abort('cannot select revision when creating marker')
1695 raise error.Abort('cannot select revision when creating marker')
1690 metadata = {}
1696 metadata = {}
1691 metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
1697 metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
1692 succs = tuple(parsenodeid(succ) for succ in successors)
1698 succs = tuple(parsenodeid(succ) for succ in successors)
1693 l = repo.lock()
1699 l = repo.lock()
1694 try:
1700 try:
1695 tr = repo.transaction('debugobsolete')
1701 tr = repo.transaction('debugobsolete')
1696 try:
1702 try:
1697 date = opts.get('date')
1703 date = opts.get('date')
1698 if date:
1704 if date:
1699 date = dateutil.parsedate(date)
1705 date = dateutil.parsedate(date)
1700 else:
1706 else:
1701 date = None
1707 date = None
1702 prec = parsenodeid(precursor)
1708 prec = parsenodeid(precursor)
1703 parents = None
1709 parents = None
1704 if opts['record_parents']:
1710 if opts['record_parents']:
1705 if prec not in repo.unfiltered():
1711 if prec not in repo.unfiltered():
1706 raise error.Abort('cannot use --record-parents on '
1712 raise error.Abort('cannot use --record-parents on '
1707 'unknown changesets')
1713 'unknown changesets')
1708 parents = repo.unfiltered()[prec].parents()
1714 parents = repo.unfiltered()[prec].parents()
1709 parents = tuple(p.node() for p in parents)
1715 parents = tuple(p.node() for p in parents)
1710 repo.obsstore.create(tr, prec, succs, opts['flags'],
1716 repo.obsstore.create(tr, prec, succs, opts['flags'],
1711 parents=parents, date=date,
1717 parents=parents, date=date,
1712 metadata=metadata, ui=ui)
1718 metadata=metadata, ui=ui)
1713 tr.close()
1719 tr.close()
1714 except ValueError as exc:
1720 except ValueError as exc:
1715 raise error.Abort(_('bad obsmarker input: %s') %
1721 raise error.Abort(_('bad obsmarker input: %s') %
1716 pycompat.bytestr(exc))
1722 pycompat.bytestr(exc))
1717 finally:
1723 finally:
1718 tr.release()
1724 tr.release()
1719 finally:
1725 finally:
1720 l.release()
1726 l.release()
1721 else:
1727 else:
1722 if opts['rev']:
1728 if opts['rev']:
1723 revs = scmutil.revrange(repo, opts['rev'])
1729 revs = scmutil.revrange(repo, opts['rev'])
1724 nodes = [repo[r].node() for r in revs]
1730 nodes = [repo[r].node() for r in revs]
1725 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1731 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1726 exclusive=opts['exclusive']))
1732 exclusive=opts['exclusive']))
1727 markers.sort(key=lambda x: x._data)
1733 markers.sort(key=lambda x: x._data)
1728 else:
1734 else:
1729 markers = obsutil.getmarkers(repo)
1735 markers = obsutil.getmarkers(repo)
1730
1736
1731 markerstoiter = markers
1737 markerstoiter = markers
1732 isrelevant = lambda m: True
1738 isrelevant = lambda m: True
1733 if opts.get('rev') and opts.get('index'):
1739 if opts.get('rev') and opts.get('index'):
1734 markerstoiter = obsutil.getmarkers(repo)
1740 markerstoiter = obsutil.getmarkers(repo)
1735 markerset = set(markers)
1741 markerset = set(markers)
1736 isrelevant = lambda m: m in markerset
1742 isrelevant = lambda m: m in markerset
1737
1743
1738 fm = ui.formatter('debugobsolete', opts)
1744 fm = ui.formatter('debugobsolete', opts)
1739 for i, m in enumerate(markerstoiter):
1745 for i, m in enumerate(markerstoiter):
1740 if not isrelevant(m):
1746 if not isrelevant(m):
1741 # marker can be irrelevant when we're iterating over a set
1747 # marker can be irrelevant when we're iterating over a set
1742 # of markers (markerstoiter) which is bigger than the set
1748 # of markers (markerstoiter) which is bigger than the set
1743 # of markers we want to display (markers)
1749 # of markers we want to display (markers)
1744 # this can happen if both --index and --rev options are
1750 # this can happen if both --index and --rev options are
1745 # provided and thus we need to iterate over all of the markers
1751 # provided and thus we need to iterate over all of the markers
1746 # to get the correct indices, but only display the ones that
1752 # to get the correct indices, but only display the ones that
1747 # are relevant to --rev value
1753 # are relevant to --rev value
1748 continue
1754 continue
1749 fm.startitem()
1755 fm.startitem()
1750 ind = i if opts.get('index') else None
1756 ind = i if opts.get('index') else None
1751 cmdutil.showmarker(fm, m, index=ind)
1757 cmdutil.showmarker(fm, m, index=ind)
1752 fm.end()
1758 fm.end()
1753
1759
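# --- illustrative sketch (editor's addition, not part of the upstream file) ---
# A minimal way to exercise the command above from Python through the hg CLI.
# It lists the markers relevant to one revision together with their global
# indices, i.e. the --rev/--index combination whose bookkeeping is explained
# in the loop above. Assumes `hg` is on PATH, Python 3.7+ for capture_output,
# and a repository with obsolescence markers in the current directory.
def _example_list_markers_for_rev(rev):
    """Return the `hg debugobsolete --rev REV --index` output as text (sketch)."""
    import subprocess
    res = subprocess.run(['hg', 'debugobsolete', '--rev', rev, '--index'],
                         check=True, capture_output=True)
    return res.stdout.decode('utf-8', 'replace')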
1754 @command('debugp1copies',
1760 @command('debugp1copies',
1755 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1761 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1756 _('[-r REV]'))
1762 _('[-r REV]'))
1757 def debugp1copies(ui, repo, **opts):
1763 def debugp1copies(ui, repo, **opts):
1758 """dump copy information compared to p1"""
1764 """dump copy information compared to p1"""
1759
1765
1760 opts = pycompat.byteskwargs(opts)
1766 opts = pycompat.byteskwargs(opts)
1761 ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
1767 ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
1762 for dst, src in ctx.p1copies().items():
1768 for dst, src in ctx.p1copies().items():
1763 ui.write('%s -> %s\n' % (src, dst))
1769 ui.write('%s -> %s\n' % (src, dst))
1764
1770
1765 @command('debugp2copies',
1771 @command('debugp2copies',
1766 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1772 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1767 _('[-r REV]'))
1773 _('[-r REV]'))
1768 def debugp2copies(ui, repo, **opts):
1774 def debugp2copies(ui, repo, **opts):
1769 """dump copy information compared to p2"""
1775 """dump copy information compared to p2"""
1770
1776
1771 opts = pycompat.byteskwargs(opts)
1777 opts = pycompat.byteskwargs(opts)
1772 ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
1778 ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
1773 for dst, src in ctx.p2copies().items():
1779 for dst, src in ctx.p2copies().items():
1774 ui.write('%s -> %s\n' % (src, dst))
1780 ui.write('%s -> %s\n' % (src, dst))
1775
1781
1776 @command('debugpathcomplete',
1782 @command('debugpathcomplete',
1777 [('f', 'full', None, _('complete an entire path')),
1783 [('f', 'full', None, _('complete an entire path')),
1778 ('n', 'normal', None, _('show only normal files')),
1784 ('n', 'normal', None, _('show only normal files')),
1779 ('a', 'added', None, _('show only added files')),
1785 ('a', 'added', None, _('show only added files')),
1780 ('r', 'removed', None, _('show only removed files'))],
1786 ('r', 'removed', None, _('show only removed files'))],
1781 _('FILESPEC...'))
1787 _('FILESPEC...'))
1782 def debugpathcomplete(ui, repo, *specs, **opts):
1788 def debugpathcomplete(ui, repo, *specs, **opts):
1783 '''complete part or all of a tracked path
1789 '''complete part or all of a tracked path
1784
1790
1785 This command supports shells that offer path name completion. It
1791 This command supports shells that offer path name completion. It
1786 currently completes only files already known to the dirstate.
1792 currently completes only files already known to the dirstate.
1787
1793
1788 Completion extends only to the next path segment unless
1794 Completion extends only to the next path segment unless
1789 --full is specified, in which case entire paths are used.'''
1795 --full is specified, in which case entire paths are used.'''
1790
1796
1791 def complete(path, acceptable):
1797 def complete(path, acceptable):
1792 dirstate = repo.dirstate
1798 dirstate = repo.dirstate
1793 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
1799 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
1794 rootdir = repo.root + pycompat.ossep
1800 rootdir = repo.root + pycompat.ossep
1795 if spec != repo.root and not spec.startswith(rootdir):
1801 if spec != repo.root and not spec.startswith(rootdir):
1796 return [], []
1802 return [], []
1797 if os.path.isdir(spec):
1803 if os.path.isdir(spec):
1798 spec += '/'
1804 spec += '/'
1799 spec = spec[len(rootdir):]
1805 spec = spec[len(rootdir):]
1800 fixpaths = pycompat.ossep != '/'
1806 fixpaths = pycompat.ossep != '/'
1801 if fixpaths:
1807 if fixpaths:
1802 spec = spec.replace(pycompat.ossep, '/')
1808 spec = spec.replace(pycompat.ossep, '/')
1803 speclen = len(spec)
1809 speclen = len(spec)
1804 fullpaths = opts[r'full']
1810 fullpaths = opts[r'full']
1805 files, dirs = set(), set()
1811 files, dirs = set(), set()
1806 adddir, addfile = dirs.add, files.add
1812 adddir, addfile = dirs.add, files.add
1807 for f, st in dirstate.iteritems():
1813 for f, st in dirstate.iteritems():
1808 if f.startswith(spec) and st[0] in acceptable:
1814 if f.startswith(spec) and st[0] in acceptable:
1809 if fixpaths:
1815 if fixpaths:
1810 f = f.replace('/', pycompat.ossep)
1816 f = f.replace('/', pycompat.ossep)
1811 if fullpaths:
1817 if fullpaths:
1812 addfile(f)
1818 addfile(f)
1813 continue
1819 continue
1814 s = f.find(pycompat.ossep, speclen)
1820 s = f.find(pycompat.ossep, speclen)
1815 if s >= 0:
1821 if s >= 0:
1816 adddir(f[:s])
1822 adddir(f[:s])
1817 else:
1823 else:
1818 addfile(f)
1824 addfile(f)
1819 return files, dirs
1825 return files, dirs
1820
1826
1821 acceptable = ''
1827 acceptable = ''
1822 if opts[r'normal']:
1828 if opts[r'normal']:
1823 acceptable += 'nm'
1829 acceptable += 'nm'
1824 if opts[r'added']:
1830 if opts[r'added']:
1825 acceptable += 'a'
1831 acceptable += 'a'
1826 if opts[r'removed']:
1832 if opts[r'removed']:
1827 acceptable += 'r'
1833 acceptable += 'r'
1828 cwd = repo.getcwd()
1834 cwd = repo.getcwd()
1829 if not specs:
1835 if not specs:
1830 specs = ['.']
1836 specs = ['.']
1831
1837
1832 files, dirs = set(), set()
1838 files, dirs = set(), set()
1833 for spec in specs:
1839 for spec in specs:
1834 f, d = complete(spec, acceptable or 'nmar')
1840 f, d = complete(spec, acceptable or 'nmar')
1835 files.update(f)
1841 files.update(f)
1836 dirs.update(d)
1842 dirs.update(d)
1837 files.update(dirs)
1843 files.update(dirs)
1838 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1844 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1839 ui.write('\n')
1845 ui.write('\n')
1840
1846
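# --- illustrative sketch (editor's addition, not part of the upstream file) ---
# How a completion helper might consume the command above: pass the partial
# path the user typed and split the newline-separated candidates. Assumes
# `hg` is on PATH, Python 3.7+, and a repository in the current directory;
# --added is one of the filter options defined just above.
def _example_complete_tracked_path(prefix, added_only=False):
    """Return completion candidates for a partially typed tracked path (sketch)."""
    import subprocess
    cmd = ['hg', 'debugpathcomplete']
    if added_only:
        cmd.append('--added')
    cmd.append(prefix)
    res = subprocess.run(cmd, check=True, capture_output=True)
    return [l for l in res.stdout.decode('utf-8', 'replace').splitlines() if l]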
1841 @command('debugpathcopies',
1847 @command('debugpathcopies',
1842 cmdutil.walkopts,
1848 cmdutil.walkopts,
1843 'hg debugpathcopies REV1 REV2 [FILE]',
1849 'hg debugpathcopies REV1 REV2 [FILE]',
1844 inferrepo=True)
1850 inferrepo=True)
1845 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
1851 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
1846 """show copies between two revisions"""
1852 """show copies between two revisions"""
1847 ctx1 = scmutil.revsingle(repo, rev1)
1853 ctx1 = scmutil.revsingle(repo, rev1)
1848 ctx2 = scmutil.revsingle(repo, rev2)
1854 ctx2 = scmutil.revsingle(repo, rev2)
1849 m = scmutil.match(ctx1, pats, opts)
1855 m = scmutil.match(ctx1, pats, opts)
1850 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
1856 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
1851 ui.write('%s -> %s\n' % (src, dst))
1857 ui.write('%s -> %s\n' % (src, dst))
1852
1858
1853 @command('debugpeer', [], _('PATH'), norepo=True)
1859 @command('debugpeer', [], _('PATH'), norepo=True)
1854 def debugpeer(ui, path):
1860 def debugpeer(ui, path):
1855 """establish a connection to a peer repository"""
1861 """establish a connection to a peer repository"""
1856 # Always enable peer request logging. Requires --debug to display
1862 # Always enable peer request logging. Requires --debug to display
1857 # it, though.
1863 # it, though.
1858 overrides = {
1864 overrides = {
1859 ('devel', 'debug.peer-request'): True,
1865 ('devel', 'debug.peer-request'): True,
1860 }
1866 }
1861
1867
1862 with ui.configoverride(overrides):
1868 with ui.configoverride(overrides):
1863 peer = hg.peer(ui, {}, path)
1869 peer = hg.peer(ui, {}, path)
1864
1870
1865 local = peer.local() is not None
1871 local = peer.local() is not None
1866 canpush = peer.canpush()
1872 canpush = peer.canpush()
1867
1873
1868 ui.write(_('url: %s\n') % peer.url())
1874 ui.write(_('url: %s\n') % peer.url())
1869 ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
1875 ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
1870 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
1876 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
1871
1877
1872 @command('debugpickmergetool',
1878 @command('debugpickmergetool',
1873 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1879 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1874 ('', 'changedelete', None, _('emulate merging change and delete')),
1880 ('', 'changedelete', None, _('emulate merging change and delete')),
1875 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1881 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1876 _('[PATTERN]...'),
1882 _('[PATTERN]...'),
1877 inferrepo=True)
1883 inferrepo=True)
1878 def debugpickmergetool(ui, repo, *pats, **opts):
1884 def debugpickmergetool(ui, repo, *pats, **opts):
1879 """examine which merge tool is chosen for specified file
1885 """examine which merge tool is chosen for specified file
1880
1886
1881 As described in :hg:`help merge-tools`, Mercurial examines the
1887 As described in :hg:`help merge-tools`, Mercurial examines the
1882 configurations below, in this order, to decide which merge tool is
1888 configurations below, in this order, to decide which merge tool is
1883 chosen for the specified file.
1889 chosen for the specified file.
1884
1890
1885 1. ``--tool`` option
1891 1. ``--tool`` option
1886 2. ``HGMERGE`` environment variable
1892 2. ``HGMERGE`` environment variable
1887 3. configurations in ``merge-patterns`` section
1893 3. configurations in ``merge-patterns`` section
1888 4. configuration of ``ui.merge``
1894 4. configuration of ``ui.merge``
1889 5. configurations in ``merge-tools`` section
1895 5. configurations in ``merge-tools`` section
1890 6. ``hgmerge`` tool (for historical reasons only)
1896 6. ``hgmerge`` tool (for historical reasons only)
1891 7. default tool for fallback (``:merge`` or ``:prompt``)
1897 7. default tool for fallback (``:merge`` or ``:prompt``)
1892
1898
1893 This command writes out the examination result in the style below::
1899 This command writes out the examination result in the style below::
1894
1900
1895 FILE = MERGETOOL
1901 FILE = MERGETOOL
1896
1902
1897 By default, all files known in the first parent context of the
1903 By default, all files known in the first parent context of the
1898 working directory are examined. Use file patterns and/or -I/-X
1904 working directory are examined. Use file patterns and/or -I/-X
1899 options to limit target files. -r/--rev is also useful to examine
1905 options to limit target files. -r/--rev is also useful to examine
1900 files in another context without actually updating to it.
1906 files in another context without actually updating to it.
1901
1907
1902 With --debug, this command also shows warning messages emitted
1908 With --debug, this command also shows warning messages emitted
1903 while matching against ``merge-patterns`` and so on. It is
1909 while matching against ``merge-patterns`` and so on. It is
1904 recommended to use this option with explicit file patterns and/or
1910 recommended to use this option with explicit file patterns and/or
1905 -I/-X options, because it increases the amount of output per file
1911 -I/-X options, because it increases the amount of output per file
1906 according to the configurations in hgrc.
1912 according to the configurations in hgrc.
1907
1913
1908 With -v/--verbose, this command shows configurations below at
1914 With -v/--verbose, this command shows configurations below at
1909 first (only if specified).
1915 first (only if specified).
1910
1916
1911 - ``--tool`` option
1917 - ``--tool`` option
1912 - ``HGMERGE`` environment variable
1918 - ``HGMERGE`` environment variable
1913 - configuration of ``ui.merge``
1919 - configuration of ``ui.merge``
1914
1920
1915 If a merge tool is chosen before matching against
1921 If a merge tool is chosen before matching against
1916 ``merge-patterns``, this command can't show any helpful
1922 ``merge-patterns``, this command can't show any helpful
1917 information, even with --debug. In such a case, the information
1923 information, even with --debug. In such a case, the information
1918 above is useful for understanding why a merge tool was chosen.
1924 above is useful for understanding why a merge tool was chosen.
1919 """
1925 """
1920 opts = pycompat.byteskwargs(opts)
1926 opts = pycompat.byteskwargs(opts)
1921 overrides = {}
1927 overrides = {}
1922 if opts['tool']:
1928 if opts['tool']:
1923 overrides[('ui', 'forcemerge')] = opts['tool']
1929 overrides[('ui', 'forcemerge')] = opts['tool']
1924 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1930 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1925
1931
1926 with ui.configoverride(overrides, 'debugmergepatterns'):
1932 with ui.configoverride(overrides, 'debugmergepatterns'):
1927 hgmerge = encoding.environ.get("HGMERGE")
1933 hgmerge = encoding.environ.get("HGMERGE")
1928 if hgmerge is not None:
1934 if hgmerge is not None:
1929 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1935 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1930 uimerge = ui.config("ui", "merge")
1936 uimerge = ui.config("ui", "merge")
1931 if uimerge:
1937 if uimerge:
1932 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1938 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1933
1939
1934 ctx = scmutil.revsingle(repo, opts.get('rev'))
1940 ctx = scmutil.revsingle(repo, opts.get('rev'))
1935 m = scmutil.match(ctx, pats, opts)
1941 m = scmutil.match(ctx, pats, opts)
1936 changedelete = opts['changedelete']
1942 changedelete = opts['changedelete']
1937 for path in ctx.walk(m):
1943 for path in ctx.walk(m):
1938 fctx = ctx[path]
1944 fctx = ctx[path]
1939 try:
1945 try:
1940 if not ui.debugflag:
1946 if not ui.debugflag:
1941 ui.pushbuffer(error=True)
1947 ui.pushbuffer(error=True)
1942 tool, toolpath = filemerge._picktool(repo, ui, path,
1948 tool, toolpath = filemerge._picktool(repo, ui, path,
1943 fctx.isbinary(),
1949 fctx.isbinary(),
1944 'l' in fctx.flags(),
1950 'l' in fctx.flags(),
1945 changedelete)
1951 changedelete)
1946 finally:
1952 finally:
1947 if not ui.debugflag:
1953 if not ui.debugflag:
1948 ui.popbuffer()
1954 ui.popbuffer()
1949 ui.write(('%s = %s\n') % (path, tool))
1955 ui.write(('%s = %s\n') % (path, tool))
1950
1956
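# --- illustrative sketch (editor's addition, not part of the upstream file) ---
# Parsing the "FILE = MERGETOOL" lines documented above into a mapping, and
# optionally forcing a tool with --tool to confirm that it takes precedence
# over HGMERGE, merge-patterns and ui.merge. Assumes `hg` is on PATH,
# Python 3.7+, and a repository in the current directory; ':merge' is one of
# the built-in internal tools mentioned in the docstring.
def _example_picked_mergetools(pats=(), rev=None, tool=None):
    """Return {file: mergetool} as reported by `hg debugpickmergetool` (sketch)."""
    import subprocess
    cmd = ['hg', 'debugpickmergetool']
    if rev:
        cmd += ['-r', rev]
    if tool:
        cmd += ['--tool', tool]
    cmd += list(pats)
    res = subprocess.run(cmd, check=True, capture_output=True)
    lines = res.stdout.decode('utf-8', 'replace').splitlines()
    return dict(l.split(' = ', 1) for l in lines if ' = ' in l)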
1951 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1957 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1952 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1958 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1953 '''access the pushkey key/value protocol
1959 '''access the pushkey key/value protocol
1954
1960
1955 With two args, list the keys in the given namespace.
1961 With two args, list the keys in the given namespace.
1956
1962
1957 With five args, set a key to new if it currently is set to old.
1963 With five args, set a key to new if it currently is set to old.
1958 Reports success or failure.
1964 Reports success or failure.
1959 '''
1965 '''
1960
1966
1961 target = hg.peer(ui, {}, repopath)
1967 target = hg.peer(ui, {}, repopath)
1962 if keyinfo:
1968 if keyinfo:
1963 key, old, new = keyinfo
1969 key, old, new = keyinfo
1964 with target.commandexecutor() as e:
1970 with target.commandexecutor() as e:
1965 r = e.callcommand('pushkey', {
1971 r = e.callcommand('pushkey', {
1966 'namespace': namespace,
1972 'namespace': namespace,
1967 'key': key,
1973 'key': key,
1968 'old': old,
1974 'old': old,
1969 'new': new,
1975 'new': new,
1970 }).result()
1976 }).result()
1971
1977
1972 ui.status(pycompat.bytestr(r) + '\n')
1978 ui.status(pycompat.bytestr(r) + '\n')
1973 return not r
1979 return not r
1974 else:
1980 else:
1975 for k, v in sorted(target.listkeys(namespace).iteritems()):
1981 for k, v in sorted(target.listkeys(namespace).iteritems()):
1976 ui.write("%s\t%s\n" % (stringutil.escapestr(k),
1982 ui.write("%s\t%s\n" % (stringutil.escapestr(k),
1977 stringutil.escapestr(v)))
1983 stringutil.escapestr(v)))
1978
1984
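# --- illustrative sketch (editor's addition, not part of the upstream file) ---
# The listing form described in the docstring above (two args: REPO and
# NAMESPACE), driven through the CLI. The output is one "key<TAB>value" line
# per entry, which this helper turns into a dict. Assumes `hg` is on PATH and
# Python 3.7+; REPO may be a local path or a remote URL.
def _example_list_pushkeys(repopath, namespace='bookmarks'):
    """Return {key: value} for a pushkey namespace of the given repo (sketch)."""
    import subprocess
    res = subprocess.run(['hg', 'debugpushkey', repopath, namespace],
                         check=True, capture_output=True)
    out = res.stdout.decode('utf-8', 'replace')
    return dict(line.split('\t', 1) for line in out.splitlines() if '\t' in line)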
1979 @command('debugpvec', [], _('A B'))
1985 @command('debugpvec', [], _('A B'))
1980 def debugpvec(ui, repo, a, b=None):
1986 def debugpvec(ui, repo, a, b=None):
1981 ca = scmutil.revsingle(repo, a)
1987 ca = scmutil.revsingle(repo, a)
1982 cb = scmutil.revsingle(repo, b)
1988 cb = scmutil.revsingle(repo, b)
1983 pa = pvec.ctxpvec(ca)
1989 pa = pvec.ctxpvec(ca)
1984 pb = pvec.ctxpvec(cb)
1990 pb = pvec.ctxpvec(cb)
1985 if pa == pb:
1991 if pa == pb:
1986 rel = "="
1992 rel = "="
1987 elif pa > pb:
1993 elif pa > pb:
1988 rel = ">"
1994 rel = ">"
1989 elif pa < pb:
1995 elif pa < pb:
1990 rel = "<"
1996 rel = "<"
1991 elif pa | pb:
1997 elif pa | pb:
1992 rel = "|"
1998 rel = "|"
1993 ui.write(_("a: %s\n") % pa)
1999 ui.write(_("a: %s\n") % pa)
1994 ui.write(_("b: %s\n") % pb)
2000 ui.write(_("b: %s\n") % pb)
1995 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2001 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1996 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
2002 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1997 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
2003 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1998 pa.distance(pb), rel))
2004 pa.distance(pb), rel))
1999
2005
2000 @command('debugrebuilddirstate|debugrebuildstate',
2006 @command('debugrebuilddirstate|debugrebuildstate',
2001 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
2007 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
2002 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
2008 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
2003 'the working copy parent')),
2009 'the working copy parent')),
2004 ],
2010 ],
2005 _('[-r REV]'))
2011 _('[-r REV]'))
2006 def debugrebuilddirstate(ui, repo, rev, **opts):
2012 def debugrebuilddirstate(ui, repo, rev, **opts):
2007 """rebuild the dirstate as it would look like for the given revision
2013 """rebuild the dirstate as it would look like for the given revision
2008
2014
2009 If no revision is specified, the first current parent will be used.
2015 If no revision is specified, the first current parent will be used.
2010
2016
2011 The dirstate will be set to the files of the given revision.
2017 The dirstate will be set to the files of the given revision.
2012 The actual working directory content or existing dirstate
2018 The actual working directory content or existing dirstate
2013 information such as adds or removes is not considered.
2019 information such as adds or removes is not considered.
2014
2020
2015 ``minimal`` will only rebuild the dirstate status for files that claim to be
2021 ``minimal`` will only rebuild the dirstate status for files that claim to be
2016 tracked but are not in the parent manifest, or that exist in the parent
2022 tracked but are not in the parent manifest, or that exist in the parent
2017 manifest but are not in the dirstate. It will not change adds, removes, or
2023 manifest but are not in the dirstate. It will not change adds, removes, or
2018 modified files that are in the working copy parent.
2024 modified files that are in the working copy parent.
2019
2025
2020 One use of this command is to make the next :hg:`status` invocation
2026 One use of this command is to make the next :hg:`status` invocation
2021 check the actual file content.
2027 check the actual file content.
2022 """
2028 """
2023 ctx = scmutil.revsingle(repo, rev)
2029 ctx = scmutil.revsingle(repo, rev)
2024 with repo.wlock():
2030 with repo.wlock():
2025 dirstate = repo.dirstate
2031 dirstate = repo.dirstate
2026 changedfiles = None
2032 changedfiles = None
2027 # See command doc for what minimal does.
2033 # See command doc for what minimal does.
2028 if opts.get(r'minimal'):
2034 if opts.get(r'minimal'):
2029 manifestfiles = set(ctx.manifest().keys())
2035 manifestfiles = set(ctx.manifest().keys())
2030 dirstatefiles = set(dirstate)
2036 dirstatefiles = set(dirstate)
2031 manifestonly = manifestfiles - dirstatefiles
2037 manifestonly = manifestfiles - dirstatefiles
2032 dsonly = dirstatefiles - manifestfiles
2038 dsonly = dirstatefiles - manifestfiles
2033 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
2039 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
2034 changedfiles = manifestonly | dsnotadded
2040 changedfiles = manifestonly | dsnotadded
2035
2041
2036 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2042 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2037
2043
2038 @command('debugrebuildfncache', [], '')
2044 @command('debugrebuildfncache', [], '')
2039 def debugrebuildfncache(ui, repo):
2045 def debugrebuildfncache(ui, repo):
2040 """rebuild the fncache file"""
2046 """rebuild the fncache file"""
2041 repair.rebuildfncache(ui, repo)
2047 repair.rebuildfncache(ui, repo)
2042
2048
2043 @command('debugrename',
2049 @command('debugrename',
2044 [('r', 'rev', '', _('revision to debug'), _('REV'))],
2050 [('r', 'rev', '', _('revision to debug'), _('REV'))],
2045 _('[-r REV] [FILE]...'))
2051 _('[-r REV] [FILE]...'))
2046 def debugrename(ui, repo, *pats, **opts):
2052 def debugrename(ui, repo, *pats, **opts):
2047 """dump rename information"""
2053 """dump rename information"""
2048
2054
2049 opts = pycompat.byteskwargs(opts)
2055 opts = pycompat.byteskwargs(opts)
2050 ctx = scmutil.revsingle(repo, opts.get('rev'))
2056 ctx = scmutil.revsingle(repo, opts.get('rev'))
2051 m = scmutil.match(ctx, pats, opts)
2057 m = scmutil.match(ctx, pats, opts)
2052 for abs in ctx.walk(m):
2058 for abs in ctx.walk(m):
2053 fctx = ctx[abs]
2059 fctx = ctx[abs]
2054 o = fctx.filelog().renamed(fctx.filenode())
2060 o = fctx.filelog().renamed(fctx.filenode())
2055 rel = repo.pathto(abs)
2061 rel = repo.pathto(abs)
2056 if o:
2062 if o:
2057 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2063 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2058 else:
2064 else:
2059 ui.write(_("%s not renamed\n") % rel)
2065 ui.write(_("%s not renamed\n") % rel)
2060
2066
2061 @command('debugrevlog', cmdutil.debugrevlogopts +
2067 @command('debugrevlog', cmdutil.debugrevlogopts +
2062 [('d', 'dump', False, _('dump index data'))],
2068 [('d', 'dump', False, _('dump index data'))],
2063 _('-c|-m|FILE'),
2069 _('-c|-m|FILE'),
2064 optionalrepo=True)
2070 optionalrepo=True)
2065 def debugrevlog(ui, repo, file_=None, **opts):
2071 def debugrevlog(ui, repo, file_=None, **opts):
2066 """show data and statistics about a revlog"""
2072 """show data and statistics about a revlog"""
2067 opts = pycompat.byteskwargs(opts)
2073 opts = pycompat.byteskwargs(opts)
2068 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
2074 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
2069
2075
2070 if opts.get("dump"):
2076 if opts.get("dump"):
2071 numrevs = len(r)
2077 numrevs = len(r)
2072 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
2078 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
2073 " rawsize totalsize compression heads chainlen\n"))
2079 " rawsize totalsize compression heads chainlen\n"))
2074 ts = 0
2080 ts = 0
2075 heads = set()
2081 heads = set()
2076
2082
2077 for rev in pycompat.xrange(numrevs):
2083 for rev in pycompat.xrange(numrevs):
2078 dbase = r.deltaparent(rev)
2084 dbase = r.deltaparent(rev)
2079 if dbase == -1:
2085 if dbase == -1:
2080 dbase = rev
2086 dbase = rev
2081 cbase = r.chainbase(rev)
2087 cbase = r.chainbase(rev)
2082 clen = r.chainlen(rev)
2088 clen = r.chainlen(rev)
2083 p1, p2 = r.parentrevs(rev)
2089 p1, p2 = r.parentrevs(rev)
2084 rs = r.rawsize(rev)
2090 rs = r.rawsize(rev)
2085 ts = ts + rs
2091 ts = ts + rs
2086 heads -= set(r.parentrevs(rev))
2092 heads -= set(r.parentrevs(rev))
2087 heads.add(rev)
2093 heads.add(rev)
2088 try:
2094 try:
2089 compression = ts / r.end(rev)
2095 compression = ts / r.end(rev)
2090 except ZeroDivisionError:
2096 except ZeroDivisionError:
2091 compression = 0
2097 compression = 0
2092 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2098 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2093 "%11d %5d %8d\n" %
2099 "%11d %5d %8d\n" %
2094 (rev, p1, p2, r.start(rev), r.end(rev),
2100 (rev, p1, p2, r.start(rev), r.end(rev),
2095 r.start(dbase), r.start(cbase),
2101 r.start(dbase), r.start(cbase),
2096 r.start(p1), r.start(p2),
2102 r.start(p1), r.start(p2),
2097 rs, ts, compression, len(heads), clen))
2103 rs, ts, compression, len(heads), clen))
2098 return 0
2104 return 0
2099
2105
2100 v = r.version
2106 v = r.version
2101 format = v & 0xFFFF
2107 format = v & 0xFFFF
2102 flags = []
2108 flags = []
2103 gdelta = False
2109 gdelta = False
2104 if v & revlog.FLAG_INLINE_DATA:
2110 if v & revlog.FLAG_INLINE_DATA:
2105 flags.append('inline')
2111 flags.append('inline')
2106 if v & revlog.FLAG_GENERALDELTA:
2112 if v & revlog.FLAG_GENERALDELTA:
2107 gdelta = True
2113 gdelta = True
2108 flags.append('generaldelta')
2114 flags.append('generaldelta')
2109 if not flags:
2115 if not flags:
2110 flags = ['(none)']
2116 flags = ['(none)']
2111
2117
2112 ### tracks merge vs single parent
2118 ### tracks merge vs single parent
2113 nummerges = 0
2119 nummerges = 0
2114
2120
2115 ### tracks how the deltas are built
2121 ### tracks how the deltas are built
2116 # nodelta
2122 # nodelta
2117 numempty = 0
2123 numempty = 0
2118 numemptytext = 0
2124 numemptytext = 0
2119 numemptydelta = 0
2125 numemptydelta = 0
2120 # full file content
2126 # full file content
2121 numfull = 0
2127 numfull = 0
2122 # intermediate snapshot against a prior snapshot
2128 # intermediate snapshot against a prior snapshot
2123 numsemi = 0
2129 numsemi = 0
2124 # snapshot count per depth
2130 # snapshot count per depth
2125 numsnapdepth = collections.defaultdict(lambda: 0)
2131 numsnapdepth = collections.defaultdict(lambda: 0)
2126 # delta against previous revision
2132 # delta against previous revision
2127 numprev = 0
2133 numprev = 0
2128 # delta against first or second parent (not prev)
2134 # delta against first or second parent (not prev)
2129 nump1 = 0
2135 nump1 = 0
2130 nump2 = 0
2136 nump2 = 0
2131 # delta against neither prev nor parents
2137 # delta against neither prev nor parents
2132 numother = 0
2138 numother = 0
2133 # delta against prev that are also first or second parent
2139 # delta against prev that are also first or second parent
2134 # (details of `numprev`)
2140 # (details of `numprev`)
2135 nump1prev = 0
2141 nump1prev = 0
2136 nump2prev = 0
2142 nump2prev = 0
2137
2143
2144 # data about the delta chain of each rev
2150 # data about the delta chain of each rev
2139 chainlengths = []
2145 chainlengths = []
2140 chainbases = []
2146 chainbases = []
2141 chainspans = []
2147 chainspans = []
2142
2148
2143 # data about each revision
2149 # data about each revision
2144 datasize = [None, 0, 0]
2150 datasize = [None, 0, 0]
2145 fullsize = [None, 0, 0]
2151 fullsize = [None, 0, 0]
2146 semisize = [None, 0, 0]
2152 semisize = [None, 0, 0]
2147 # snapshot count per depth
2153 # snapshot count per depth
2148 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2154 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2149 deltasize = [None, 0, 0]
2155 deltasize = [None, 0, 0]
2150 chunktypecounts = {}
2156 chunktypecounts = {}
2151 chunktypesizes = {}
2157 chunktypesizes = {}
2152
2158
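# (editor's note) the helper below keeps a running [minimum, maximum, total]
# triple in `l`; the lists initialised to [None, 0, 0] above are updated in
# place, and the totals are later divided by the matching counts to obtain
# averages.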
2153 def addsize(size, l):
2159 def addsize(size, l):
2154 if l[0] is None or size < l[0]:
2160 if l[0] is None or size < l[0]:
2155 l[0] = size
2161 l[0] = size
2156 if size > l[1]:
2162 if size > l[1]:
2157 l[1] = size
2163 l[1] = size
2158 l[2] += size
2164 l[2] += size
2159
2165
2160 numrevs = len(r)
2166 numrevs = len(r)
2161 for rev in pycompat.xrange(numrevs):
2167 for rev in pycompat.xrange(numrevs):
2162 p1, p2 = r.parentrevs(rev)
2168 p1, p2 = r.parentrevs(rev)
2163 delta = r.deltaparent(rev)
2169 delta = r.deltaparent(rev)
2164 if format > 0:
2170 if format > 0:
2165 addsize(r.rawsize(rev), datasize)
2171 addsize(r.rawsize(rev), datasize)
2166 if p2 != nullrev:
2172 if p2 != nullrev:
2167 nummerges += 1
2173 nummerges += 1
2168 size = r.length(rev)
2174 size = r.length(rev)
2169 if delta == nullrev:
2175 if delta == nullrev:
2170 chainlengths.append(0)
2176 chainlengths.append(0)
2171 chainbases.append(r.start(rev))
2177 chainbases.append(r.start(rev))
2172 chainspans.append(size)
2178 chainspans.append(size)
2173 if size == 0:
2179 if size == 0:
2174 numempty += 1
2180 numempty += 1
2175 numemptytext += 1
2181 numemptytext += 1
2176 else:
2182 else:
2177 numfull += 1
2183 numfull += 1
2178 numsnapdepth[0] += 1
2184 numsnapdepth[0] += 1
2179 addsize(size, fullsize)
2185 addsize(size, fullsize)
2180 addsize(size, snapsizedepth[0])
2186 addsize(size, snapsizedepth[0])
2181 else:
2187 else:
2182 chainlengths.append(chainlengths[delta] + 1)
2188 chainlengths.append(chainlengths[delta] + 1)
2183 baseaddr = chainbases[delta]
2189 baseaddr = chainbases[delta]
2184 revaddr = r.start(rev)
2190 revaddr = r.start(rev)
2185 chainbases.append(baseaddr)
2191 chainbases.append(baseaddr)
2186 chainspans.append((revaddr - baseaddr) + size)
2192 chainspans.append((revaddr - baseaddr) + size)
2187 if size == 0:
2193 if size == 0:
2188 numempty += 1
2194 numempty += 1
2189 numemptydelta += 1
2195 numemptydelta += 1
2190 elif r.issnapshot(rev):
2196 elif r.issnapshot(rev):
2191 addsize(size, semisize)
2197 addsize(size, semisize)
2192 numsemi += 1
2198 numsemi += 1
2193 depth = r.snapshotdepth(rev)
2199 depth = r.snapshotdepth(rev)
2194 numsnapdepth[depth] += 1
2200 numsnapdepth[depth] += 1
2195 addsize(size, snapsizedepth[depth])
2201 addsize(size, snapsizedepth[depth])
2196 else:
2202 else:
2197 addsize(size, deltasize)
2203 addsize(size, deltasize)
2198 if delta == rev - 1:
2204 if delta == rev - 1:
2199 numprev += 1
2205 numprev += 1
2200 if delta == p1:
2206 if delta == p1:
2201 nump1prev += 1
2207 nump1prev += 1
2202 elif delta == p2:
2208 elif delta == p2:
2203 nump2prev += 1
2209 nump2prev += 1
2204 elif delta == p1:
2210 elif delta == p1:
2205 nump1 += 1
2211 nump1 += 1
2206 elif delta == p2:
2212 elif delta == p2:
2207 nump2 += 1
2213 nump2 += 1
2208 elif delta != nullrev:
2214 elif delta != nullrev:
2209 numother += 1
2215 numother += 1
2210
2216
2211 # Obtain data on the raw chunks in the revlog.
2217 # Obtain data on the raw chunks in the revlog.
2212 if util.safehasattr(r, '_getsegmentforrevs'):
2218 if util.safehasattr(r, '_getsegmentforrevs'):
2213 segment = r._getsegmentforrevs(rev, rev)[1]
2219 segment = r._getsegmentforrevs(rev, rev)[1]
2214 else:
2220 else:
2215 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2221 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2216 if segment:
2222 if segment:
2217 chunktype = bytes(segment[0:1])
2223 chunktype = bytes(segment[0:1])
2218 else:
2224 else:
2219 chunktype = 'empty'
2225 chunktype = 'empty'
2220
2226
2221 if chunktype not in chunktypecounts:
2227 if chunktype not in chunktypecounts:
2222 chunktypecounts[chunktype] = 0
2228 chunktypecounts[chunktype] = 0
2223 chunktypesizes[chunktype] = 0
2229 chunktypesizes[chunktype] = 0
2224
2230
2225 chunktypecounts[chunktype] += 1
2231 chunktypecounts[chunktype] += 1
2226 chunktypesizes[chunktype] += size
2232 chunktypesizes[chunktype] += size
2227
2233
2228 # Adjust size min value for empty cases
2234 # Adjust size min value for empty cases
2229 for size in (datasize, fullsize, semisize, deltasize):
2235 for size in (datasize, fullsize, semisize, deltasize):
2230 if size[0] is None:
2236 if size[0] is None:
2231 size[0] = 0
2237 size[0] = 0
2232
2238
2233 numdeltas = numrevs - numfull - numempty - numsemi
2239 numdeltas = numrevs - numfull - numempty - numsemi
2234 numoprev = numprev - nump1prev - nump2prev
2240 numoprev = numprev - nump1prev - nump2prev
2235 totalrawsize = datasize[2]
2241 totalrawsize = datasize[2]
2236 datasize[2] /= numrevs
2242 datasize[2] /= numrevs
2237 fulltotal = fullsize[2]
2243 fulltotal = fullsize[2]
2238 fullsize[2] /= numfull
2244 fullsize[2] /= numfull
2239 semitotal = semisize[2]
2245 semitotal = semisize[2]
2240 snaptotal = {}
2246 snaptotal = {}
2241 if numsemi > 0:
2247 if numsemi > 0:
2242 semisize[2] /= numsemi
2248 semisize[2] /= numsemi
2243 for depth in snapsizedepth:
2249 for depth in snapsizedepth:
2244 snaptotal[depth] = snapsizedepth[depth][2]
2250 snaptotal[depth] = snapsizedepth[depth][2]
2245 snapsizedepth[depth][2] /= numsnapdepth[depth]
2251 snapsizedepth[depth][2] /= numsnapdepth[depth]
2246
2252
2247 deltatotal = deltasize[2]
2253 deltatotal = deltasize[2]
2248 if numdeltas > 0:
2254 if numdeltas > 0:
2249 deltasize[2] /= numdeltas
2255 deltasize[2] /= numdeltas
2250 totalsize = fulltotal + semitotal + deltatotal
2256 totalsize = fulltotal + semitotal + deltatotal
2251 avgchainlen = sum(chainlengths) / numrevs
2257 avgchainlen = sum(chainlengths) / numrevs
2252 maxchainlen = max(chainlengths)
2258 maxchainlen = max(chainlengths)
2253 maxchainspan = max(chainspans)
2259 maxchainspan = max(chainspans)
2254 compratio = 1
2260 compratio = 1
2255 if totalsize:
2261 if totalsize:
2256 compratio = totalrawsize / totalsize
2262 compratio = totalrawsize / totalsize
2257
2263
2258 basedfmtstr = '%%%dd\n'
2264 basedfmtstr = '%%%dd\n'
2259 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2265 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2260
2266
2261 def dfmtstr(max):
2267 def dfmtstr(max):
2262 return basedfmtstr % len(str(max))
2268 return basedfmtstr % len(str(max))
2263 def pcfmtstr(max, padding=0):
2269 def pcfmtstr(max, padding=0):
2264 return basepcfmtstr % (len(str(max)), ' ' * padding)
2270 return basepcfmtstr % (len(str(max)), ' ' * padding)
2265
2271
2266 def pcfmt(value, total):
2272 def pcfmt(value, total):
2267 if total:
2273 if total:
2268 return (value, 100 * float(value) / total)
2274 return (value, 100 * float(value) / total)
2269 else:
2275 else:
2270 return value, 100.0
2276 return value, 100.0
2271
2277
2272 ui.write(('format : %d\n') % format)
2278 ui.write(('format : %d\n') % format)
2273 ui.write(('flags : %s\n') % ', '.join(flags))
2279 ui.write(('flags : %s\n') % ', '.join(flags))
2274
2280
2275 ui.write('\n')
2281 ui.write('\n')
2276 fmt = pcfmtstr(totalsize)
2282 fmt = pcfmtstr(totalsize)
2277 fmt2 = dfmtstr(totalsize)
2283 fmt2 = dfmtstr(totalsize)
2278 ui.write(('revisions : ') + fmt2 % numrevs)
2284 ui.write(('revisions : ') + fmt2 % numrevs)
2279 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2285 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2280 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2286 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2281 ui.write(('revisions : ') + fmt2 % numrevs)
2287 ui.write(('revisions : ') + fmt2 % numrevs)
2282 ui.write((' empty : ') + fmt % pcfmt(numempty, numrevs))
2288 ui.write((' empty : ') + fmt % pcfmt(numempty, numrevs))
2283 ui.write((' text : ')
2289 ui.write((' text : ')
2284 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta))
2290 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta))
2285 ui.write((' delta : ')
2291 ui.write((' delta : ')
2286 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta))
2292 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta))
2287 ui.write((' snapshot : ') + fmt % pcfmt(numfull + numsemi, numrevs))
2293 ui.write((' snapshot : ') + fmt % pcfmt(numfull + numsemi, numrevs))
2288 for depth in sorted(numsnapdepth):
2294 for depth in sorted(numsnapdepth):
2289 ui.write((' lvl-%-3d : ' % depth)
2295 ui.write((' lvl-%-3d : ' % depth)
2290 + fmt % pcfmt(numsnapdepth[depth], numrevs))
2296 + fmt % pcfmt(numsnapdepth[depth], numrevs))
2291 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2297 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2292 ui.write(('revision size : ') + fmt2 % totalsize)
2298 ui.write(('revision size : ') + fmt2 % totalsize)
2293 ui.write((' snapshot : ')
2299 ui.write((' snapshot : ')
2294 + fmt % pcfmt(fulltotal + semitotal, totalsize))
2300 + fmt % pcfmt(fulltotal + semitotal, totalsize))
2295 for depth in sorted(numsnapdepth):
2301 for depth in sorted(numsnapdepth):
2296 ui.write((' lvl-%-3d : ' % depth)
2302 ui.write((' lvl-%-3d : ' % depth)
2297 + fmt % pcfmt(snaptotal[depth], totalsize))
2303 + fmt % pcfmt(snaptotal[depth], totalsize))
2298 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2304 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2299
2305
2300 def fmtchunktype(chunktype):
2306 def fmtchunktype(chunktype):
2301 if chunktype == 'empty':
2307 if chunktype == 'empty':
2302 return ' %s : ' % chunktype
2308 return ' %s : ' % chunktype
2303 elif chunktype in pycompat.bytestr(string.ascii_letters):
2309 elif chunktype in pycompat.bytestr(string.ascii_letters):
2304 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2310 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2305 else:
2311 else:
2306 return ' 0x%s : ' % hex(chunktype)
2312 return ' 0x%s : ' % hex(chunktype)
2307
2313
2308 ui.write('\n')
2314 ui.write('\n')
2309 ui.write(('chunks : ') + fmt2 % numrevs)
2315 ui.write(('chunks : ') + fmt2 % numrevs)
2310 for chunktype in sorted(chunktypecounts):
2316 for chunktype in sorted(chunktypecounts):
2311 ui.write(fmtchunktype(chunktype))
2317 ui.write(fmtchunktype(chunktype))
2312 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2318 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2313 ui.write(('chunks size : ') + fmt2 % totalsize)
2319 ui.write(('chunks size : ') + fmt2 % totalsize)
2314 for chunktype in sorted(chunktypecounts):
2320 for chunktype in sorted(chunktypecounts):
2315 ui.write(fmtchunktype(chunktype))
2321 ui.write(fmtchunktype(chunktype))
2316 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2322 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2317
2323
2318 ui.write('\n')
2324 ui.write('\n')
2319 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2325 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2320 ui.write(('avg chain length : ') + fmt % avgchainlen)
2326 ui.write(('avg chain length : ') + fmt % avgchainlen)
2321 ui.write(('max chain length : ') + fmt % maxchainlen)
2327 ui.write(('max chain length : ') + fmt % maxchainlen)
2322 ui.write(('max chain reach : ') + fmt % maxchainspan)
2328 ui.write(('max chain reach : ') + fmt % maxchainspan)
2323 ui.write(('compression ratio : ') + fmt % compratio)
2329 ui.write(('compression ratio : ') + fmt % compratio)
2324
2330
2325 if format > 0:
2331 if format > 0:
2326 ui.write('\n')
2332 ui.write('\n')
2327 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2333 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2328 % tuple(datasize))
2334 % tuple(datasize))
2329 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2335 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2330 % tuple(fullsize))
2336 % tuple(fullsize))
2331 ui.write(('inter-snapshot size (min/max/avg) : %d / %d / %d\n')
2337 ui.write(('inter-snapshot size (min/max/avg) : %d / %d / %d\n')
2332 % tuple(semisize))
2338 % tuple(semisize))
2333 for depth in sorted(snapsizedepth):
2339 for depth in sorted(snapsizedepth):
2334 if depth == 0:
2340 if depth == 0:
2335 continue
2341 continue
2336 ui.write((' level-%-3d (min/max/avg) : %d / %d / %d\n')
2342 ui.write((' level-%-3d (min/max/avg) : %d / %d / %d\n')
2337 % ((depth,) + tuple(snapsizedepth[depth])))
2343 % ((depth,) + tuple(snapsizedepth[depth])))
2338 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2344 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2339 % tuple(deltasize))
2345 % tuple(deltasize))
2340
2346
2341 if numdeltas > 0:
2347 if numdeltas > 0:
2342 ui.write('\n')
2348 ui.write('\n')
2343 fmt = pcfmtstr(numdeltas)
2349 fmt = pcfmtstr(numdeltas)
2344 fmt2 = pcfmtstr(numdeltas, 4)
2350 fmt2 = pcfmtstr(numdeltas, 4)
2345 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2351 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2346 if numprev > 0:
2352 if numprev > 0:
2347 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2353 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2348 numprev))
2354 numprev))
2349 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2355 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2350 numprev))
2356 numprev))
2351 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2357 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2352 numprev))
2358 numprev))
2353 if gdelta:
2359 if gdelta:
2354 ui.write(('deltas against p1 : ')
2360 ui.write(('deltas against p1 : ')
2355 + fmt % pcfmt(nump1, numdeltas))
2361 + fmt % pcfmt(nump1, numdeltas))
2356 ui.write(('deltas against p2 : ')
2362 ui.write(('deltas against p2 : ')
2357 + fmt % pcfmt(nump2, numdeltas))
2363 + fmt % pcfmt(nump2, numdeltas))
2358 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2364 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2359 numdeltas))
2365 numdeltas))
2360
2366
2361 @command('debugrevlogindex', cmdutil.debugrevlogopts +
2367 @command('debugrevlogindex', cmdutil.debugrevlogopts +
2362 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
2368 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
2363 _('[-f FORMAT] -c|-m|FILE'),
2369 _('[-f FORMAT] -c|-m|FILE'),
2364 optionalrepo=True)
2370 optionalrepo=True)
2365 def debugrevlogindex(ui, repo, file_=None, **opts):
2371 def debugrevlogindex(ui, repo, file_=None, **opts):
2366 """dump the contents of a revlog index"""
2372 """dump the contents of a revlog index"""
2367 opts = pycompat.byteskwargs(opts)
2373 opts = pycompat.byteskwargs(opts)
2368 r = cmdutil.openrevlog(repo, 'debugrevlogindex', file_, opts)
2374 r = cmdutil.openrevlog(repo, 'debugrevlogindex', file_, opts)
2369 format = opts.get('format', 0)
2375 format = opts.get('format', 0)
2370 if format not in (0, 1):
2376 if format not in (0, 1):
2371 raise error.Abort(_("unknown format %d") % format)
2377 raise error.Abort(_("unknown format %d") % format)
2372
2378
2373 if ui.debugflag:
2379 if ui.debugflag:
2374 shortfn = hex
2380 shortfn = hex
2375 else:
2381 else:
2376 shortfn = short
2382 shortfn = short
2377
2383
2378 # There might not be anything in r, so have a sane default
2384 # There might not be anything in r, so have a sane default
2379 idlen = 12
2385 idlen = 12
2380 for i in r:
2386 for i in r:
2381 idlen = len(shortfn(r.node(i)))
2387 idlen = len(shortfn(r.node(i)))
2382 break
2388 break
2383
2389
2384 if format == 0:
2390 if format == 0:
2385 if ui.verbose:
2391 if ui.verbose:
2386 ui.write((" rev offset length linkrev"
2392 ui.write((" rev offset length linkrev"
2387 " %s %s p2\n") % ("nodeid".ljust(idlen),
2393 " %s %s p2\n") % ("nodeid".ljust(idlen),
2388 "p1".ljust(idlen)))
2394 "p1".ljust(idlen)))
2389 else:
2395 else:
2390 ui.write((" rev linkrev %s %s p2\n") % (
2396 ui.write((" rev linkrev %s %s p2\n") % (
2391 "nodeid".ljust(idlen), "p1".ljust(idlen)))
2397 "nodeid".ljust(idlen), "p1".ljust(idlen)))
2392 elif format == 1:
2398 elif format == 1:
2393 if ui.verbose:
2399 if ui.verbose:
2394 ui.write((" rev flag offset length size link p1"
2400 ui.write((" rev flag offset length size link p1"
2395 " p2 %s\n") % "nodeid".rjust(idlen))
2401 " p2 %s\n") % "nodeid".rjust(idlen))
2396 else:
2402 else:
2397 ui.write((" rev flag size link p1 p2 %s\n") %
2403 ui.write((" rev flag size link p1 p2 %s\n") %
2398 "nodeid".rjust(idlen))
2404 "nodeid".rjust(idlen))
2399
2405
2400 for i in r:
2406 for i in r:
2401 node = r.node(i)
2407 node = r.node(i)
2402 if format == 0:
2408 if format == 0:
2403 try:
2409 try:
2404 pp = r.parents(node)
2410 pp = r.parents(node)
2405 except Exception:
2411 except Exception:
2406 pp = [nullid, nullid]
2412 pp = [nullid, nullid]
2407 if ui.verbose:
2413 if ui.verbose:
2408 ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
2414 ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
2409 i, r.start(i), r.length(i), r.linkrev(i),
2415 i, r.start(i), r.length(i), r.linkrev(i),
2410 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
2416 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
2411 else:
2417 else:
2412 ui.write("% 6d % 7d %s %s %s\n" % (
2418 ui.write("% 6d % 7d %s %s %s\n" % (
2413 i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
2419 i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
2414 shortfn(pp[1])))
2420 shortfn(pp[1])))
2415 elif format == 1:
2421 elif format == 1:
2416 pr = r.parentrevs(i)
2422 pr = r.parentrevs(i)
2417 if ui.verbose:
2423 if ui.verbose:
2418 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
2424 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
2419 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
2425 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
2420 r.linkrev(i), pr[0], pr[1], shortfn(node)))
2426 r.linkrev(i), pr[0], pr[1], shortfn(node)))
2421 else:
2427 else:
2422 ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
2428 ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
2423 i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
2429 i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
2424 shortfn(node)))
2430 shortfn(node)))
2425
2431
2426 @command('debugrevspec',
2432 @command('debugrevspec',
2427 [('', 'optimize', None,
2433 [('', 'optimize', None,
2428 _('print parsed tree after optimizing (DEPRECATED)')),
2434 _('print parsed tree after optimizing (DEPRECATED)')),
2429 ('', 'show-revs', True, _('print list of result revisions (default)')),
2435 ('', 'show-revs', True, _('print list of result revisions (default)')),
2430 ('s', 'show-set', None, _('print internal representation of result set')),
2436 ('s', 'show-set', None, _('print internal representation of result set')),
2431 ('p', 'show-stage', [],
2437 ('p', 'show-stage', [],
2432 _('print parsed tree at the given stage'), _('NAME')),
2438 _('print parsed tree at the given stage'), _('NAME')),
2433 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2439 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2434 ('', 'verify-optimized', False, _('verify optimized result')),
2440 ('', 'verify-optimized', False, _('verify optimized result')),
2435 ],
2441 ],
2436 ('REVSPEC'))
2442 ('REVSPEC'))
2437 def debugrevspec(ui, repo, expr, **opts):
2443 def debugrevspec(ui, repo, expr, **opts):
2438 """parse and apply a revision specification
2444 """parse and apply a revision specification
2439
2445
2440 Use -p/--show-stage option to print the parsed tree at the given stages.
2446 Use -p/--show-stage option to print the parsed tree at the given stages.
2441 Use -p all to print the tree at every stage.
2447 Use -p all to print the tree at every stage.
2442
2448
2443 Use --no-show-revs option with -s or -p to print only the set
2449 Use --no-show-revs option with -s or -p to print only the set
2444 representation or the parsed tree respectively.
2450 representation or the parsed tree respectively.
2445
2451
2446 Use --verify-optimized to compare the optimized result with the unoptimized
2452 Use --verify-optimized to compare the optimized result with the unoptimized
2447 one. Returns 1 if the optimized result differs.
2453 one. Returns 1 if the optimized result differs.
2448 """
2454 """
2449 opts = pycompat.byteskwargs(opts)
2455 opts = pycompat.byteskwargs(opts)
2450 aliases = ui.configitems('revsetalias')
2456 aliases = ui.configitems('revsetalias')
2451 stages = [
2457 stages = [
2452 ('parsed', lambda tree: tree),
2458 ('parsed', lambda tree: tree),
2453 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2459 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2454 ui.warn)),
2460 ui.warn)),
2455 ('concatenated', revsetlang.foldconcat),
2461 ('concatenated', revsetlang.foldconcat),
2456 ('analyzed', revsetlang.analyze),
2462 ('analyzed', revsetlang.analyze),
2457 ('optimized', revsetlang.optimize),
2463 ('optimized', revsetlang.optimize),
2458 ]
2464 ]
2459 if opts['no_optimized']:
2465 if opts['no_optimized']:
2460 stages = stages[:-1]
2466 stages = stages[:-1]
2461 if opts['verify_optimized'] and opts['no_optimized']:
2467 if opts['verify_optimized'] and opts['no_optimized']:
2462 raise error.Abort(_('cannot use --verify-optimized with '
2468 raise error.Abort(_('cannot use --verify-optimized with '
2463 '--no-optimized'))
2469 '--no-optimized'))
2464 stagenames = set(n for n, f in stages)
2470 stagenames = set(n for n, f in stages)
2465
2471
2466 showalways = set()
2472 showalways = set()
2467 showchanged = set()
2473 showchanged = set()
2468 if ui.verbose and not opts['show_stage']:
2474 if ui.verbose and not opts['show_stage']:
2469 # show parsed tree by --verbose (deprecated)
2475 # show parsed tree by --verbose (deprecated)
2470 showalways.add('parsed')
2476 showalways.add('parsed')
2471 showchanged.update(['expanded', 'concatenated'])
2477 showchanged.update(['expanded', 'concatenated'])
2472 if opts['optimize']:
2478 if opts['optimize']:
2473 showalways.add('optimized')
2479 showalways.add('optimized')
2474 if opts['show_stage'] and opts['optimize']:
2480 if opts['show_stage'] and opts['optimize']:
2475 raise error.Abort(_('cannot use --optimize with --show-stage'))
2481 raise error.Abort(_('cannot use --optimize with --show-stage'))
2476 if opts['show_stage'] == ['all']:
2482 if opts['show_stage'] == ['all']:
2477 showalways.update(stagenames)
2483 showalways.update(stagenames)
2478 else:
2484 else:
2479 for n in opts['show_stage']:
2485 for n in opts['show_stage']:
2480 if n not in stagenames:
2486 if n not in stagenames:
2481 raise error.Abort(_('invalid stage name: %s') % n)
2487 raise error.Abort(_('invalid stage name: %s') % n)
2482 showalways.update(opts['show_stage'])
2488 showalways.update(opts['show_stage'])
2483
2489
2484 treebystage = {}
2490 treebystage = {}
2485 printedtree = None
2491 printedtree = None
2486 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
2492 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
2487 for n, f in stages:
2493 for n, f in stages:
2488 treebystage[n] = tree = f(tree)
2494 treebystage[n] = tree = f(tree)
2489 if n in showalways or (n in showchanged and tree != printedtree):
2495 if n in showalways or (n in showchanged and tree != printedtree):
2490 if opts['show_stage'] or n != 'parsed':
2496 if opts['show_stage'] or n != 'parsed':
2491 ui.write(("* %s:\n") % n)
2497 ui.write(("* %s:\n") % n)
2492 ui.write(revsetlang.prettyformat(tree), "\n")
2498 ui.write(revsetlang.prettyformat(tree), "\n")
2493 printedtree = tree
2499 printedtree = tree
2494
2500
2495 if opts['verify_optimized']:
2501 if opts['verify_optimized']:
2496 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2502 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2497 brevs = revset.makematcher(treebystage['optimized'])(repo)
2503 brevs = revset.makematcher(treebystage['optimized'])(repo)
2498 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2504 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2499 ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
2505 ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
2500 ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
2506 ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
2501 arevs = list(arevs)
2507 arevs = list(arevs)
2502 brevs = list(brevs)
2508 brevs = list(brevs)
2503 if arevs == brevs:
2509 if arevs == brevs:
2504 return 0
2510 return 0
2505 ui.write(('--- analyzed\n'), label='diff.file_a')
2511 ui.write(('--- analyzed\n'), label='diff.file_a')
2506 ui.write(('+++ optimized\n'), label='diff.file_b')
2512 ui.write(('+++ optimized\n'), label='diff.file_b')
2507 sm = difflib.SequenceMatcher(None, arevs, brevs)
2513 sm = difflib.SequenceMatcher(None, arevs, brevs)
2508 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2514 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2509 if tag in (r'delete', r'replace'):
2515 if tag in (r'delete', r'replace'):
2510 for c in arevs[alo:ahi]:
2516 for c in arevs[alo:ahi]:
2511 ui.write('-%d\n' % c, label='diff.deleted')
2517 ui.write('-%d\n' % c, label='diff.deleted')
2512 if tag in (r'insert', r'replace'):
2518 if tag in (r'insert', r'replace'):
2513 for c in brevs[blo:bhi]:
2519 for c in brevs[blo:bhi]:
2514 ui.write('+%d\n' % c, label='diff.inserted')
2520 ui.write('+%d\n' % c, label='diff.inserted')
2515 if tag == r'equal':
2521 if tag == r'equal':
2516 for c in arevs[alo:ahi]:
2522 for c in arevs[alo:ahi]:
2517 ui.write(' %d\n' % c)
2523 ui.write(' %d\n' % c)
2518 return 1
2524 return 1
2519
2525
2520 func = revset.makematcher(tree)
2526 func = revset.makematcher(tree)
2521 revs = func(repo)
2527 revs = func(repo)
2522 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2528 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2523 ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
2529 ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
2524 if not opts['show_revs']:
2530 if not opts['show_revs']:
2525 return
2531 return
2526 for c in revs:
2532 for c in revs:
2527 ui.write("%d\n" % c)
2533 ui.write("%d\n" % c)
2528
2534
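# --- illustrative sketch (editor's addition, not part of the upstream file) ---
# Inspecting how an expression moves through the stages listed above
# (parsed, expanded, concatenated, analyzed, optimized): `-p all` prints the
# tree at every stage and `-s` adds the internal set representation.
# Assumes `hg` is on PATH and a repository in the current directory.
def _example_show_revspec_stages(expr="heads(all())"):
    """Print every parse stage and the result set for a revset (sketch)."""
    import subprocess
    subprocess.run(['hg', 'debugrevspec', '-p', 'all', '-s', expr], check=True)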
@command('debugserve', [
    ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
    ('', 'logiofd', '', _('file descriptor to log server I/O to')),
    ('', 'logiofile', '', _('file to log server I/O to')),
    ], '')
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts['sshstdio']:
        raise error.Abort(_('only --sshstdio is currently supported'))

    logfh = None

    if opts['logiofd'] and opts['logiofile']:
        raise error.Abort(_('cannot use both --logiofd and --logiofile'))

    if opts['logiofd']:
        # Line buffered because output is line based.
        try:
            logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
    elif opts['logiofile']:
        logfh = open(opts['logiofile'], 'ab', 1)

    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()

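# Illustrative usage (assumed invocation, not part of the original module):
#
#   $ hg -R /path/to/repo debugserve --sshstdio --logiofile /tmp/hg-io.log
#
# runs the SSH wire protocol server over the process's stdio and logs the raw
# server I/O to the (hypothetical) file /tmp/hg-io.log.
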
@command('debugsetparents', [], _('REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """

    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, 'null').node()

    with repo.wlock():
        repo.setparents(node1, node2)

@command('debugssl', [], '[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    '''test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    '''
    if not pycompat.iswindows:
        raise error.Abort(_('certificate chain building is only possible on '
                            'Windows'))

    if not source:
        if not repo:
            raise error.Abort(_("there is no Mercurial repository here, and no "
                                "server specified"))
        source = "default"

    source, branches = hg.parseurl(ui.expandpath(source))
    url = util.url(source)

    defaultport = {'https': 443, 'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_("malformed port number in URL"))
    else:
        raise error.Abort(_("only https and ssh connections are supported"))

    from . import win32

    s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
                        cert_reqs=ssl.CERT_NONE, ca_certs=None)

    try:
        s.connect(addr)
        cert = s.getpeercert(True)

        ui.status(_('checking the certificate chain for %s\n') % url.host)

        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_('certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_('failed.\n'))
            else:
                ui.status(_('done.\n'))
        else:
            ui.status(_('full certificate chain is available\n'))
    finally:
        s.close()

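# Illustrative usage (assumed invocation; the URL is hypothetical):
#
#   $ hg debugssl https://hg.example.com/repo
#
# connects to the host on port 443 and, on Windows, tries to complete its
# certificate chain via Windows Update as described in the docstring above.
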
@command('debugsub',
    [('r', 'rev', '',
      _('revision to check'), _('REV'))],
    _('[-r REV] [REV]'))
def debugsub(ui, repo, rev=None):
    ctx = scmutil.revsingle(repo, rev, None)
    for k, v in sorted(ctx.substate.items()):
        ui.write(('path %s\n') % k)
        ui.write((' source %s\n') % v[0])
        ui.write((' revision %s\n') % v[1])

@command('debugsuccessorssets',
    [('', 'closest', False, _('return closest successors sets only'))],
    _('[REV]'))
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only, unless the closest
    successors sets are requested (``--closest``).

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors is called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    ctx2str = bytes
    node2str = short
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write('%s\n' % ctx2str(ctx))
        for succsset in obsutil.successorssets(repo, ctx.node(),
                                               closest=opts[r'closest'],
                                               cache=cache):
            if succsset:
                ui.write('    ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(' ')
                    ui.write(node2str(node))
            ui.write('\n')

@command('debugtemplate',
    [('r', 'rev', [], _('apply template on changesets'), _('REV')),
     ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
    _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts[r'rev']:
        if repo is None:
            raise error.RepoError(_('there is no Mercurial repository here '
                                    '(.hg not found)'))
        revs = scmutil.revrange(repo, opts[r'rev'])

    props = {}
    for d in opts[r'define']:
        try:
            k, v = (e.strip() for e in d.split('=', 1))
            if not k or k == 'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_('malformed keyword definition: %s') % d)

    if ui.verbose:
        aliases = ui.configitems('templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), '\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')

    if revs is None:
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
            ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
            ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()

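# Illustrative usage (assumed invocations, not part of the original module):
#
#   $ hg debugtemplate -r . '{node|short}: {desc|firstline}\n'
#   $ hg debugtemplate -D owner=alice '{owner}\n'
#
# The first form renders a log template against the working-directory parent;
# the second renders a generic template with the ``owner`` keyword supplied
# via -D/--define.
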
@command('debuguigetpass', [
    ('p', 'prompt', '', _('prompt text'), _('TEXT')),
    ], _('[-p TEXT]'), norepo=True)
def debuguigetpass(ui, prompt=''):
    """show prompt to type password"""
    r = ui.getpass(prompt)
    ui.write(('response: %s\n') % r)

@command('debuguiprompt', [
    ('p', 'prompt', '', _('prompt text'), _('TEXT')),
    ], _('[-p TEXT]'), norepo=True)
def debuguiprompt(ui, prompt=''):
    """show plain prompt"""
    r = ui.prompt(prompt)
    ui.write(('response: %s\n') % r)

@command('debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    with repo.wlock(), repo.lock():
        repo.updatecaches(full=True)

@command('debugupgraderepo', [
    ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
    ('', 'run', False, _('performs an upgrade')),
    ('', 'backup', True, _('keep the old repository content around')),
    ])
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.
    """
    return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize,
                               backup=backup)

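# Illustrative usage (assumed invocations, not part of the original module):
#
#   $ hg debugupgraderepo          # report problems and possible optimizations
#   $ hg debugupgraderepo --run    # actually perform the upgrade
#
# Optimizations named in the report can be requested with -o/--optimize NAME.
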
@command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
         inferrepo=True)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
    items = list(repo[None].walk(m))
    if not items:
        return
    f = lambda fn: fn
    if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
        f = lambda fn: util.normpath(fn)
    fmt = 'f %%-%ds %%-%ds %%s' % (
        max([len(abs) for abs in items]),
        max([len(repo.pathto(abs)) for abs in items]))
    for abs in items:
        line = fmt % (abs, f(repo.pathto(abs)), m.exact(abs) and 'exact' or '')
        ui.write("%s\n" % line.rstrip())

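# Illustrative usage (assumed invocation, not part of the original module):
#
#   $ hg debugwalk -I 'glob:**.py'
#
# lists the files matched by the pattern, flagging exact (non-pattern) matches
# in the last column.
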
@command('debugwhyunstable', [], _('REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
        dnodes = ''
        if entry.get('divergentnodes'):
            dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
                              for ctx in entry['divergentnodes']) + ' '
        ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
                                    entry['reason'], entry['node']))

@command('debugwireargs',
    [('', 'three', '', 'three'),
     ('', 'four', '', 'four'),
     ('', 'five', '', 'five'),
     ] + cmdutil.remoteopts,
    _('REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    for opt in cmdutil.remoteopts:
        del opts[opt[1]]
    args = {}
    for k, v in opts.iteritems():
        if v:
            args[k] = v
    args = pycompat.strkwargs(args)
    # run twice to check that we don't mess up the stream for the next command
    res1 = repo.debugwireargs(*vals, **args)
    res2 = repo.debugwireargs(*vals, **args)
    ui.write("%s\n" % res1)
    if res1 != res2:
        ui.warn("%s\n" % res2)

def _parsewirelangblocks(fh):
    activeaction = None
    blocklines = []
    lastindent = 0

    for line in fh:
        line = line.rstrip()
        if not line:
            continue

        if line.startswith(b'#'):
            continue

        if not line.startswith(b' '):
            # New block. Flush previous one.
            if activeaction:
                yield activeaction, blocklines

            activeaction = line
            blocklines = []
            lastindent = 0
            continue

        # Else we start with an indent.

        if not activeaction:
            raise error.Abort(_('indented line outside of block'))

        indent = len(line) - len(line.lstrip())

        # If this line is indented more than the last line, concatenate it.
        if indent > lastindent and blocklines:
            blocklines[-1] += line.lstrip()
        else:
            blocklines.append(line)
            lastindent = indent

    # Flush last block.
    if activeaction:
        yield activeaction, blocklines

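# Illustrative example (not part of the original module): given this input,
#
#   command listkeys
#       namespace bookmarks
#   readavailable
#
# _parsewirelangblocks() yields (b'command listkeys',
# [b'    namespace bookmarks']) followed by (b'readavailable', []): each
# non-indented line starts a new action block, and indented lines become that
# block's payload.
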
@command('debugwireproto',
    [
    ('', 'localssh', False, _('start an SSH server for this repo')),
    ('', 'peer', '', _('construct a specific version of the peer')),
    ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
    ('', 'nologhandshake', False,
     _('do not log I/O related to the peer handshake')),
    ] + cmdutil.remoteopts,
    _('[PATH]'),
    optionalrepo=True)
def debugwireproto(ui, repo, path=None, **opts):
    """send wire protocol commands to a server

    This command can be used to issue wire protocol commands to remote
    peers and to debug the raw data being exchanged.

    ``--localssh`` will start an SSH server against the current repository
    and connect to that. By default, the connection will perform a handshake
    and establish an appropriate peer instance.

    ``--peer`` can be used to bypass the handshake protocol and construct a
    peer instance using the specified class type. Valid values are ``raw``,
    ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
    raw data payloads and don't support higher-level command actions.

    ``--noreadstderr`` can be used to disable automatic reading from stderr
    of the peer (for SSH connections only). Disabling automatic reading of
    stderr is useful for making output more deterministic.

    Commands are issued via a mini language which is specified via stdin.
    The language consists of individual actions to perform. An action is
    defined by a block. A block is defined as a line with no leading
    space followed by 0 or more lines with leading space. Blocks are
    effectively a high-level command with additional metadata.

    Lines beginning with ``#`` are ignored.

    The following sections denote available actions.

    raw
    ---

    Send raw data to the server.

    The block payload contains the raw data to send as one atomic send
    operation. The data may not actually be delivered in a single system
    call: it depends on the abilities of the transport being used.

    Each line in the block is de-indented and concatenated. Then, that
    value is evaluated as a Python b'' literal. This allows the use of
    backslash escaping, etc.

    raw+
    ----

    Behaves like ``raw`` except flushes output afterwards.

    command <X>
    -----------

    Send a request to run a named command, whose name follows the ``command``
    string.

    Arguments to the command are defined as lines in this block. The format of
    each line is ``<key> <value>``. e.g.::

        command listkeys
            namespace bookmarks

    If the value begins with ``eval:``, it will be interpreted as a Python
    literal expression. Otherwise values are interpreted as Python b'' literals.
    This allows sending complex types and encoding special byte sequences via
    backslash escaping.

    The following arguments have special meaning:

    ``PUSHFILE``
        When defined, the *push* mechanism of the peer will be used instead
        of the static request-response mechanism and the content of the
        file specified in the value of this argument will be sent as the
        command payload.

        This can be used to submit a local bundle file to the remote.

    batchbegin
    ----------

    Instruct the peer to begin a batched send.

    All ``command`` blocks are queued for execution until the next
    ``batchsubmit`` block.

    batchsubmit
    -----------

    Submit previously queued ``command`` blocks as a batch request.

    This action MUST be paired with a ``batchbegin`` action.

    httprequest <method> <path>
    ---------------------------

    (HTTP peer only)

    Send an HTTP request to the peer.

    The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.

    Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
    headers to add to the request. e.g. ``Accept: foo``.

    The following arguments are special:

    ``BODYFILE``
        The content of the file defined as the value to this argument will be
        transferred verbatim as the HTTP request body.

    ``frame <type> <flags> <payload>``
        Send a unified protocol frame as part of the request body.

        All frames will be collected and sent as the body to the HTTP
        request.

    close
    -----

    Close the connection to the server.

    flush
    -----

    Flush data written to the server.

    readavailable
    -------------

    Close the write end of the connection and read all available data from
    the server.

    If the connection to the server encompasses multiple pipes, we poll both
    pipes and read available data.

    readline
    --------

    Read a line of output from the server. If there are multiple output
    pipes, reads only the main pipe.

    ereadline
    ---------

    Like ``readline``, but read from the stderr pipe, if available.

    read <X>
    --------

    ``read()`` N bytes from the server's main output pipe.

    eread <X>
    ---------

    ``read()`` N bytes from the server's stderr pipe, if available.

    Specifying Unified Frame-Based Protocol Frames
    ----------------------------------------------

    It is possible to emit a *Unified Frame-Based Protocol* by using special
    syntax.

    A frame is composed as a type, flags, and payload. These can be parsed
    from a string of the form:

        <request-id> <stream-id> <stream-flags> <type> <flags> <payload>

    ``request-id`` and ``stream-id`` are integers defining the request and
    stream identifiers.

    ``type`` can be an integer value for the frame type or the string name
    of the type. The strings are defined in ``wireprotoframing.py``. e.g.
    ``command-name``.

    ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
    components. Each component (and there can be just one) can be an integer
    or a flag name for stream flags or frame flags, respectively. Values are
    resolved to integers and then bitwise OR'd together.

    ``payload`` represents the raw frame payload. If it begins with
    ``cbor:``, the following string is evaluated as Python code and the
    resulting object is fed into a CBOR encoder. Otherwise it is interpreted
    as a Python byte string literal.
    """
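    # Illustrative only (not part of the original command): a small script
    # that could be fed to this command on stdin, assuming a peer is
    # available:
    #
    #   command listkeys
    #       namespace bookmarks
    #   close
    #
    # Each non-indented line starts a new action block, mirroring the grammar
    # documented above.
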
    opts = pycompat.byteskwargs(opts)

    if opts['localssh'] and not repo:
        raise error.Abort(_('--localssh requires a repository'))

    if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
        raise error.Abort(_('invalid value for --peer'),
                          hint=_('valid values are "raw", "ssh1", and "ssh2"'))

    if path and opts['localssh']:
        raise error.Abort(_('cannot specify --localssh with an explicit '
                            'path'))

    if ui.interactive():
        ui.write(_('(waiting for commands on stdin)\n'))

    blocks = list(_parsewirelangblocks(ui.fin))

    proc = None
    stdin = None
    stdout = None
    stderr = None
    opener = None

    if opts['localssh']:
        # We start the SSH server in its own process so there is process
        # separation. This prevents a whole class of potential bugs around
        # shared state from interfering with server operation.
        args = procutil.hgcmd() + [
            '-R', repo.root,
            'debugserve', '--sshstdio',
        ]
        proc = subprocess.Popen(pycompat.rapply(procutil.tonativestr, args),
                                stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                                bufsize=0)

        stdin = proc.stdin
        stdout = proc.stdout
        stderr = proc.stderr

        # We turn the pipes into observers so we can log I/O.
        if ui.verbose or opts['peer'] == 'raw':
            stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
                                               logdata=True)
            stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
                                                logdata=True)
            stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
                                                logdata=True)

        # --localssh also implies the peer connection settings.

        url = 'ssh://localserver'
        autoreadstderr = not opts['noreadstderr']

        if opts['peer'] == 'ssh1':
            ui.write(_('creating ssh peer for wire protocol version 1\n'))
            peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
                                     None, autoreadstderr=autoreadstderr)
        elif opts['peer'] == 'ssh2':
            ui.write(_('creating ssh peer for wire protocol version 2\n'))
            peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
                                     None, autoreadstderr=autoreadstderr)
        elif opts['peer'] == 'raw':
            ui.write(_('using raw connection to peer\n'))
            peer = None
        else:
            ui.write(_('creating ssh peer from handshake results\n'))
            peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
                                    autoreadstderr=autoreadstderr)

    elif path:
        # We bypass hg.peer() so we can proxy the sockets.
        # TODO consider not doing this because we skip
        # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
        u = util.url(path)
        if u.scheme != 'http':
            raise error.Abort(_('only http:// paths are currently supported'))

        url, authinfo = u.authinfo()
        openerargs = {
            r'useragent': b'Mercurial debugwireproto',
        }

        # Turn pipes/sockets into observers so we can log I/O.
        if ui.verbose:
            openerargs.update({
                r'loggingfh': ui,
                r'loggingname': b's',
                r'loggingopts': {
                    r'logdata': True,
                    r'logdataapis': False,
                },
            })

        if ui.debugflag:
            openerargs[r'loggingopts'][r'logdataapis'] = True

        # Don't send default headers when in raw mode. This allows us to
        # bypass most of the behavior of our URL handling code so we can
        # have near complete control over what's sent on the wire.
        if opts['peer'] == 'raw':
            openerargs[r'sendaccept'] = False

        opener = urlmod.opener(ui, authinfo, **openerargs)

        if opts['peer'] == 'http2':
            ui.write(_('creating http peer for wire protocol version 2\n'))
            # We go through makepeer() because we need an API descriptor for
            # the peer instance to be useful.
            with ui.configoverride({
                ('experimental', 'httppeer.advertise-v2'): True}):
                if opts['nologhandshake']:
                    ui.pushbuffer()

                peer = httppeer.makepeer(ui, path, opener=opener)

                if opts['nologhandshake']:
                    ui.popbuffer()

            if not isinstance(peer, httppeer.httpv2peer):
                raise error.Abort(_('could not instantiate HTTP peer for '
                                    'wire protocol version 2'),
                                  hint=_('the server may not have the feature '
                                         'enabled or is not allowing this '
                                         'client version'))

        elif opts['peer'] == 'raw':
            ui.write(_('using raw connection to peer\n'))
            peer = None
        elif opts['peer']:
            raise error.Abort(_('--peer %s not supported with HTTP peers') %
                              opts['peer'])
        else:
            peer = httppeer.makepeer(ui, path, opener=opener)

        # We /could/ populate stdin/stdout with sock.makefile()...
    else:
        raise error.Abort(_('unsupported connection configuration'))

    batchedcommands = None

    # Now perform actions based on the parsed wire language instructions.
    for action, lines in blocks:
        if action in ('raw', 'raw+'):
            if not stdin:
                raise error.Abort(_('cannot call raw/raw+ on this peer'))

            # Concatenate the data together.
            data = ''.join(l.lstrip() for l in lines)
            data = stringutil.unescapestr(data)
            stdin.write(data)

            if action == 'raw+':
                stdin.flush()
        elif action == 'flush':
            if not stdin:
                raise error.Abort(_('cannot call flush on this peer'))
            stdin.flush()
        elif action.startswith('command'):
            if not peer:
                raise error.Abort(_('cannot send commands unless peer instance '
                                    'is available'))

            command = action.split(' ', 1)[1]

            args = {}
            for line in lines:
                # We need to allow empty values.
                fields = line.lstrip().split(' ', 1)
                if len(fields) == 1:
                    key = fields[0]
                    value = ''
                else:
                    key, value = fields

                if value.startswith('eval:'):
                    value = stringutil.evalpythonliteral(value[5:])
                else:
                    value = stringutil.unescapestr(value)

                args[key] = value

            if batchedcommands is not None:
                batchedcommands.append((command, args))
                continue

            ui.status(_('sending %s command\n') % command)

            if 'PUSHFILE' in args:
                with open(args['PUSHFILE'], r'rb') as fh:
                    del args['PUSHFILE']
                    res, output = peer._callpush(command, fh,
                                                 **pycompat.strkwargs(args))
                    ui.status(_('result: %s\n') % stringutil.escapestr(res))
                    ui.status(_('remote output: %s\n') %
                              stringutil.escapestr(output))
            else:
                with peer.commandexecutor() as e:
                    res = e.callcommand(command, args).result()

                if isinstance(res, wireprotov2peer.commandresponse):
                    val = res.objects()
                    ui.status(_('response: %s\n') %
                              stringutil.pprint(val, bprefix=True, indent=2))
                else:
                    ui.status(_('response: %s\n') %
                              stringutil.pprint(res, bprefix=True, indent=2))

        elif action == 'batchbegin':
            if batchedcommands is not None:
                raise error.Abort(_('nested batchbegin not allowed'))

            batchedcommands = []
        elif action == 'batchsubmit':
            # There is a batching API we could go through. But it would be
            # difficult to normalize requests into function calls. It is easier
            # to bypass this layer and normalize to commands + args.
            ui.status(_('sending batch with %d sub-commands\n') %
                      len(batchedcommands))
            for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
                ui.status(_('response #%d: %s\n') %
                          (i, stringutil.escapestr(chunk)))

            batchedcommands = None

        elif action.startswith('httprequest '):
            if not opener:
                raise error.Abort(_('cannot use httprequest without an HTTP '
                                    'peer'))

            request = action.split(' ', 2)
            if len(request) != 3:
                raise error.Abort(_('invalid httprequest: expected format is '
                                    '"httprequest <method> <path>'))

            method, httppath = request[1:]
            headers = {}
            body = None
            frames = []
            for line in lines:
                line = line.lstrip()
                m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
                if m:
                    # Headers need to use native strings.
                    key = pycompat.strurl(m.group(1))
                    value = pycompat.strurl(m.group(2))
                    headers[key] = value
                    continue

                if line.startswith(b'BODYFILE '):
                    # The value after "BODYFILE " names the file to read.
                    with open(line.split(b' ', 1)[1], 'rb') as fh:
                        body = fh.read()
                elif line.startswith(b'frame '):
                    frame = wireprotoframing.makeframefromhumanstring(
                        line[len(b'frame '):])

                    frames.append(frame)
                else:
                    raise error.Abort(_('unknown argument to httprequest: %s') %
                                      line)

            url = path + httppath

            if frames:
                body = b''.join(bytes(f) for f in frames)

            req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)

            # urllib.Request insists on using has_data() as a proxy for
            # determining the request method. Override that to use our
            # explicitly requested method.
            req.get_method = lambda: pycompat.sysstr(method)

            try:
                res = opener.open(req)
                body = res.read()
            except util.urlerr.urlerror as e:
                # read() method must be called, but only exists in Python 2
                getattr(e, 'read', lambda: None)()
                continue

            ct = res.headers.get(r'Content-Type')
            if ct == r'application/mercurial-cbor':
                ui.write(_('cbor> %s\n') %
                         stringutil.pprint(cborutil.decodeall(body),
                                           bprefix=True,
                                           indent=2))

        elif action == 'close':
            peer.close()
        elif action == 'readavailable':
            if not stdout or not stderr:
                raise error.Abort(_('readavailable not available on this peer'))

            stdin.close()
            stdout.read()
            stderr.read()

        elif action == 'readline':
            if not stdout:
                raise error.Abort(_('readline not available on this peer'))
            stdout.readline()
        elif action == 'ereadline':
            if not stderr:
                raise error.Abort(_('ereadline not available on this peer'))
            stderr.readline()
        elif action.startswith('read '):
            count = int(action.split(' ', 1)[1])
            if not stdout:
                raise error.Abort(_('read not available on this peer'))
            stdout.read(count)
        elif action.startswith('eread '):
            count = int(action.split(' ', 1)[1])
            if not stderr:
                raise error.Abort(_('eread not available on this peer'))
            stderr.read(count)
        else:
            raise error.Abort(_('unknown action: %s') % action)

    if batchedcommands is not None:
        raise error.Abort(_('unclosed "batchbegin" request'))

    if peer:
        peer.close()

    if proc:
        proc.kill()