i18n: omit redundant translatable synopsis text to avoid xgettext warning...
FUJIWARA Katsunori
r38850:96b2e66d default
@@ -1,3252 +1,3251 @@
# debugcommands.py - command processing for debug* commands
#
# Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import codecs
import collections
import difflib
import errno
import operator
import os
import random
import re
import socket
import ssl
import stat
import string
import subprocess
import sys
import time

from .i18n import _
from .node import (
    bin,
    hex,
    nullhex,
    nullid,
    nullrev,
    short,
)
from .thirdparty import (
    cbor,
)
from . import (
    bundle2,
    changegroup,
    cmdutil,
    color,
    context,
    dagparser,
    dagutil,
    encoding,
    error,
    exchange,
    extensions,
    filemerge,
    filesetlang,
    formatter,
    hg,
    httppeer,
    localrepo,
    lock as lockmod,
    logcmdutil,
    merge as mergemod,
    obsolete,
    obsutil,
    phases,
    policy,
    pvec,
    pycompat,
    registrar,
    repair,
    revlog,
    revset,
    revsetlang,
    scmutil,
    setdiscovery,
    simplemerge,
    sshpeer,
    sslutil,
    streamclone,
    templater,
    treediscovery,
    upgrade,
    url as urlmod,
    util,
    vfs as vfsmod,
    wireprotoframing,
    wireprotoserver,
    wireprotov2peer,
)
from .utils import (
    dateutil,
    procutil,
    stringutil,
)

release = lockmod.release

command = registrar.command()

@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    if len(args) == 3:
        index, rev1, rev2 = args
        r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
        lookup = r.lookup
    elif len(args) == 2:
        if not repo:
            raise error.Abort(_('there is no Mercurial repository here '
                                '(.hg not found)'))
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_('either two or three arguments required'))
    a = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write('%d:%s\n' % (r.rev(a), hex(a)))

@command('debugapplystreamclonebundle', [], 'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    f = hg.openpath(ui, fname)
    gen = exchange.readbundle(ui, f, fname)
    gen.apply(repo)

@command('debugbuilddag',
    [('m', 'mergeable-file', None, _('add single file mergeable changes')),
    ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
    ('n', 'new-file', None, _('add new file at each rev'))],
    _('[OPTION]... [TEXT]'))
def debugbuilddag(ui, repo, text=None,
                  mergeable_file=False,
                  overwritten_file=False,
                  new_file=False):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
          otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """
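    # Illustrative usage, not part of the upstream source: assuming an empty
    # repository, the text "+2:f +3:p2 *f */p2" creates a run of two nodes
    # tagged 'f', three more tagged 'p2', a fork off 'f', and a merge of that
    # fork with 'p2', e.g.:
    #
    #   hg init dagdemo && cd dagdemo
    #   hg debugbuilddag '+2:f +3:p2 *f */p2'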

    if text is None:
        ui.status(_("reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_('repository is not empty'))

    # determine number of revs in DAG
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == 'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = ['%d' % i
                              for i in pycompat.xrange(0, total * linesperrev)]
        initialmergedlines.append("")

    tags = []
    progress = ui.makeprogress(_('building'), unit=_('revisions'),
                               total=total)
    with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
        at = -1
        atbranch = 'default'
        nodeids = []
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == 'n':
                ui.note(('node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = "mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [x[fn].data() for x in (pa, p1,
                                                                     p2)]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append("")
                    elif at > 0:
                        ml = p1[fn].data().split("\n")
                    else:
                        ml = initialmergedlines
                    ml[id * linesperrev] += " r%i" % id
                    mergedtext = "\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = "of"
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id

                if new_file:
                    fn = "nf%i" % id
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith("nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(repo, cx, path,
                                                  filecontent[path])
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
                                    date=(id, 0),
                                    user="debugbuilddag",
                                    extra={'branch': atbranch})
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == 'l':
                id, name = data
                ui.note(('tag %s\n' % name))
                tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == 'a':
                ui.note(('branch %s\n' % data))
                atbranch = data
            progress.update(id)

        if tags:
            repo.vfs.write("localtags", "".join(tags))

def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    indent_string = ' ' * indent
    if all:
        ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
                 % indent_string)

        def showchunks(named):
            ui.write("\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write("%s%s %s %s %s %s %d\n" %
                         (indent_string, hex(node), hex(p1), hex(p2),
                          hex(cs), hex(deltabase), len(delta)))

        chunkdata = gen.changelogheader()
        showchunks("changelog")
        chunkdata = gen.manifestheader()
        showchunks("manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata['filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_('use debugbundle2 for this file'))
        chunkdata = gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write("%s%s\n" % (indent_string, hex(node)))

def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = ' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        msg = "%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = "%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        fm = ui.formatter('debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()

def _debugphaseheads(ui, data, indent=0):
    """display version and markers contained in 'data'"""
    indent_string = ' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(indent_string)
            ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))

def _quasirepr(thing):
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        return '{%s}' % (
            b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
    return pycompat.bytestr(repr(thing))

def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_('not a bundle2 file'))
    ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
    parttypes = opts.get(r'part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = '%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        if part.type == 'changegroup':
            version = part.params.get('version', '01')
            cg = changegroup.getunbundler(version, part, 'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == 'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == 'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)

@command('debugbundle',
    [('a', 'all', None, _('show all details')),
    ('', 'part-type', [], _('show only the named part type')),
    ('', 'spec', None, _('print the bundlespec of the bundle'))],
    _('FILE'),
    norepo=True)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            spec = exchange.getbundlespec(ui, f)
            ui.write('%s\n' % spec)
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)

@command('debugcapabilities',
    [], _('PATH'),
    norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    caps = peer.capabilities()
    ui.write(('Main capabilities:\n'))
    for c in sorted(caps):
        ui.write((' %s\n') % c)
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.write(('Bundle2 capabilities:\n'))
        for key, values in sorted(b2caps.iteritems()):
            ui.write((' %s\n') % key)
            for v in values:
                ui.write((' %s\n') % v)

@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        error = _(".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(error)

@command('debugcolor',
    [('', 'style', None, _('show all configured styles'))],
    'hg debugcolor')
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
    if opts.get(r'style'):
        return _debugdisplaystyle(ui)
    else:
        return _debugdisplaycolor(ui)

def _debugdisplaycolor(ui):
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for k, v in ui.configitems('color'):
            if k.startswith('color.'):
                ui._styles[k] = k[6:]
            elif k.startswith('terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_('available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(),
                   key=lambda i: ('_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(('%s\n') % colorname, label=label)

def _debugdisplaystyle(ui):
    ui.write(_('available style:\n'))
    if not ui._styles:
        return
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write('%s' % label, label=label)
        if effects:
            # 50
            ui.write(': ')
            ui.write(' ' * (max(0, width - len(label))))
            ui.write(', '.join(ui.label(e, e) for e in effects.split()))
        ui.write('\n')

@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
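    # Illustrative usage, not part of the upstream source: write the bundle
    # and load it into another repository with the companion debug command,
    # e.g.:
    #
    #   hg debugcreatestreamclonebundle stream.hg
    #   hg -R ../other debugapplystreamclonebundle stream.hg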
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(_('(warning: stream clone bundle will contain secret '
                  'revisions)\n'))

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))

@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
    ('b', 'branches', None, _('annotate with branch names')),
    ('', 'dots', None, _('use dots for runs')),
    ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
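    # Illustrative usage, not part of the upstream source: "hg debugdag -t -b"
    # emits the current changelog DAG with tag and branch annotations, while
    # "hg debugdag .hg/store/00changelog.i 2 5" reads that index file and
    # labels revisions 2 and 5 as r2 and r5 in the output.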
    spaces = opts.get(r'spaces')
    dots = opts.get(r'dots')
    if file_:
        rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                             file_)
        revs = set((int(r) for r in revs))
        def events():
            for r in rlog:
                yield 'n', (r, list(p for p in rlog.parentrevs(r)
                                    if p != -1))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        cl = repo.changelog
        tags = opts.get(r'tags')
        branches = opts.get(r'branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            b = "default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(p for p in cl.parentrevs(r)
                                    if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
        ui.write("\n")

@command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
    try:
        ui.write(r.revision(r.lookup(rev), raw=True))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)

@command('debugdate',
    [('e', 'extended', None, _('try extended date formats'))],
    _('[-e] DATE [RANGE]'),
    norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts[r"extended"]:
        d = dateutil.parsedate(date, util.extendeddateformats)
    else:
        d = dateutil.parsedate(date)
    ui.write(("internal: %d %d\n") % d)
    ui.write(("standard: %s\n") % dateutil.datestr(d))
    if range:
        m = dateutil.matchdate(range)
        ui.write(("match: %s\n") % m(d[0]))

@command('debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
        (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
        of this revision
    :``extradist``: total size of revisions not part of this delta chain from
        base of delta chain to end of this revision; a measurement
        of how much extra data we need to read/seek across to read
        the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
        how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
        (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
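    # Illustrative usage, not part of the upstream source: render a few of the
    # keywords documented above with a template (the -T option comes from the
    # formatteropts included in this command's option list), e.g.:
    #
    #   hg debugdeltachain -m -T '{rev} {chainid} {chainlen} {deltatype}\n'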
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            if e[3] == e[5]:
                deltatype = 'p1'
            elif e[3] == e[6]:
                deltatype = 'p2'
            elif e[3] == rev - 1:
                deltatype = 'prev'
            elif e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            if e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    fm.plain(' rev chain# chainlen prev delta '
             'size rawsize chainsize ratio lindist extradist '
             'extraratio')
    if withsparseread:
        fm.plain(' readsize largestblk rddensity srchunks')
    fm.plain('\n')

    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in revlog._slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write('readsize largestblock readdensity srchunks',
                     ' %10d %10d %9.5f %8d',
                     readsize, largestblock, readdensity, srchunks,
                     readsize=readsize, largestblock=largestblock,
                     readdensity=readdensity, srchunks=srchunks)

        fm.plain('\n')

    fm.end()

@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime')),
    ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = opts.get(r'nodates')
    datesort = opts.get(r'datesort')

    timestr = ""
    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        if ent[3] == -1:
            timestr = 'unset '
        elif nodates:
            timestr = 'set '
        else:
            timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
                                    time.localtime(ent[3]))
            timestr = encoding.strtolocal(timestr)
        if ent[1] & 0o20000:
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))

@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
    ('', 'nonheads', None,
     _('use old-style discovery with non-heads included')),
    ('', 'rev', [], 'restrict discovery to this set of revs'),
    ] + cmdutil.remoteopts,
    _('[--rev REV] [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(pushedrevs, remoteheads, remote=remote):
        if opts.get('old'):
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))
                dag = dagutil.revlogdag(repo.changelog)
                all = dag.ancestorset(dag.internalizeall(common))
                common = dag.externalizeall(dag.headsetofconnecteds(all))
        else:
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
                                                            ancestorsof=nodes)
        common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
        if lheads <= common:
            ui.write(("local is subset\n"))
        elif rheads <= common:
            ui.write(("remote is subset\n"))

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts['rev']
    doit(localrevs, remoterevs)

_chunksize = 4 << 10

@command('debugdownload',
    [
        ('o', 'output', '', _('path')),
    ],
    optionalrepo=True)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config
    """
    fh = urlmod.open(ui, url, output)

    dest = ui
    if output:
        dest = open(output, "wb", _chunksize)
    try:
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        if output:
            dest.close()

@command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter('debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = pycompat.fsencode(extmod.__file__)
        if isinternal:
            exttestedwith = [] # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', '').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write('name', '%s\n', extname)
        else:
            fm.write('name', '%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain('\n')
            elif not exttestedwith:
                fm.plain(_(' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(' (%s!)\n' % lasttestedversion)

        fm.condwrite(ui.verbose and extsource, 'source',
                     _(' location: %s\n'), extsource or "")

        if ui.verbose:
            fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
                     _(' tested with: %s\n'),
                     fm.formatlist(exttestedwith, name='ver'))

        fm.condwrite(ui.verbose and extbuglink, 'buglink',
                     _(' bug reporting: %s\n'), extbuglink or "")

    fm.end()

@command('debugfileset',
    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
    ('', 'all-files', False,
     _('test files from all revisions and working directory')),
    ('s', 'show-matcher', None,
     _('print internal representation of matcher')),
    ('p', 'show-stage', [],
     _('print parsed tree at the given stage'), _('NAME'))],
    _('[-r REV] [--all-files] [OPTION]... FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)

    stages = [
        ('parsed', pycompat.identity),
    ]
    stagenames = set(n for n, f in stages)

    showalways = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(filesetlang.prettyformat(tree), "\n")

    files = set()
    if opts['all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts['all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(repo.dirstate.walk(scmutil.matchall(repo),
                                        subrepos=list(wctx.substate),
                                        unknown=True, ignored=True))
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(expr)
    if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
        ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write("%s\n" % f)

951 @command('debugformat',
951 @command('debugformat',
952 [] + cmdutil.formatteropts,
952 [] + cmdutil.formatteropts)
953 _(''))
954 def debugformat(ui, repo, **opts):
953 def debugformat(ui, repo, **opts):
955 """display format information about the current repository
954 """display format information about the current repository
956
955
957 Use --verbose to get extra information about current config value and
956 Use --verbose to get extra information about current config value and
958 Mercurial default."""
957 Mercurial default."""
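# Example usage (illustrative): plain, verbose and templated output.
#   hg debugformat
#   hg debugformat --verbose
#   hg debugformat -T json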
959 opts = pycompat.byteskwargs(opts)
958 opts = pycompat.byteskwargs(opts)
960 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
959 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
961 maxvariantlength = max(len('format-variant'), maxvariantlength)
960 maxvariantlength = max(len('format-variant'), maxvariantlength)
962
961
963 def makeformatname(name):
962 def makeformatname(name):
964 return '%s:' + (' ' * (maxvariantlength - len(name)))
963 return '%s:' + (' ' * (maxvariantlength - len(name)))
965
964
966 fm = ui.formatter('debugformat', opts)
965 fm = ui.formatter('debugformat', opts)
967 if fm.isplain():
966 if fm.isplain():
968 def formatvalue(value):
967 def formatvalue(value):
969 if util.safehasattr(value, 'startswith'):
968 if util.safehasattr(value, 'startswith'):
970 return value
969 return value
971 if value:
970 if value:
972 return 'yes'
971 return 'yes'
973 else:
972 else:
974 return 'no'
973 return 'no'
975 else:
974 else:
976 formatvalue = pycompat.identity
975 formatvalue = pycompat.identity
977
976
978 fm.plain('format-variant')
977 fm.plain('format-variant')
979 fm.plain(' ' * (maxvariantlength - len('format-variant')))
978 fm.plain(' ' * (maxvariantlength - len('format-variant')))
980 fm.plain(' repo')
979 fm.plain(' repo')
981 if ui.verbose:
980 if ui.verbose:
982 fm.plain(' config default')
981 fm.plain(' config default')
983 fm.plain('\n')
982 fm.plain('\n')
984 for fv in upgrade.allformatvariant:
983 for fv in upgrade.allformatvariant:
985 fm.startitem()
984 fm.startitem()
986 repovalue = fv.fromrepo(repo)
985 repovalue = fv.fromrepo(repo)
987 configvalue = fv.fromconfig(repo)
986 configvalue = fv.fromconfig(repo)
988
987
989 if repovalue != configvalue:
988 if repovalue != configvalue:
990 namelabel = 'formatvariant.name.mismatchconfig'
989 namelabel = 'formatvariant.name.mismatchconfig'
991 repolabel = 'formatvariant.repo.mismatchconfig'
990 repolabel = 'formatvariant.repo.mismatchconfig'
992 elif repovalue != fv.default:
991 elif repovalue != fv.default:
993 namelabel = 'formatvariant.name.mismatchdefault'
992 namelabel = 'formatvariant.name.mismatchdefault'
994 repolabel = 'formatvariant.repo.mismatchdefault'
993 repolabel = 'formatvariant.repo.mismatchdefault'
995 else:
994 else:
996 namelabel = 'formatvariant.name.uptodate'
995 namelabel = 'formatvariant.name.uptodate'
997 repolabel = 'formatvariant.repo.uptodate'
996 repolabel = 'formatvariant.repo.uptodate'
998
997
999 fm.write('name', makeformatname(fv.name), fv.name,
998 fm.write('name', makeformatname(fv.name), fv.name,
1000 label=namelabel)
999 label=namelabel)
1001 fm.write('repo', ' %3s', formatvalue(repovalue),
1000 fm.write('repo', ' %3s', formatvalue(repovalue),
1002 label=repolabel)
1001 label=repolabel)
1003 if fv.default != configvalue:
1002 if fv.default != configvalue:
1004 configlabel = 'formatvariant.config.special'
1003 configlabel = 'formatvariant.config.special'
1005 else:
1004 else:
1006 configlabel = 'formatvariant.config.default'
1005 configlabel = 'formatvariant.config.default'
1007 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
1006 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
1008 label=configlabel)
1007 label=configlabel)
1009 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
1008 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
1010 label='formatvariant.default')
1009 label='formatvariant.default')
1011 fm.plain('\n')
1010 fm.plain('\n')
1012 fm.end()
1011 fm.end()
1013
1012
1014 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
1013 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
1015 def debugfsinfo(ui, path="."):
1014 def debugfsinfo(ui, path="."):
1016 """show information detected about current filesystem"""
1015 """show information detected about current filesystem"""
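# Example usage (illustrative; the optional path argument is hypothetical):
#   hg debugfsinfo
#   hg debugfsinfo /mnt/shared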
1017 ui.write(('path: %s\n') % path)
1016 ui.write(('path: %s\n') % path)
1018 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
1017 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
1019 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
1018 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
1020 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
1019 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
1021 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
1020 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
1022 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
1021 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
1023 casesensitive = '(unknown)'
1022 casesensitive = '(unknown)'
1024 try:
1023 try:
1025 with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
1024 with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
1026 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
1025 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
1027 except OSError:
1026 except OSError:
1028 pass
1027 pass
1029 ui.write(('case-sensitive: %s\n') % casesensitive)
1028 ui.write(('case-sensitive: %s\n') % casesensitive)
1030
1029
1031 @command('debuggetbundle',
1030 @command('debuggetbundle',
1032 [('H', 'head', [], _('id of head node'), _('ID')),
1031 [('H', 'head', [], _('id of head node'), _('ID')),
1033 ('C', 'common', [], _('id of common node'), _('ID')),
1032 ('C', 'common', [], _('id of common node'), _('ID')),
1034 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
1033 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
1035 _('REPO FILE [-H|-C ID]...'),
1034 _('REPO FILE [-H|-C ID]...'),
1036 norepo=True)
1035 norepo=True)
1037 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1036 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1038 """retrieves a bundle from a repo
1037 """retrieves a bundle from a repo
1039
1038
1040 Every ID must be a full-length hex node id string. Saves the bundle to the
1039 Every ID must be a full-length hex node id string. Saves the bundle to the
1041 given file.
1040 given file.
1042 """
1041 """
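# Example usage (illustrative; REPO, FILE and the full-hex node are
# hypothetical placeholders, per the synopsis above):
#   hg debuggetbundle http://example.com/repo out.hg -H $FULLHEXNODE -t bundle2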
1043 opts = pycompat.byteskwargs(opts)
1042 opts = pycompat.byteskwargs(opts)
1044 repo = hg.peer(ui, opts, repopath)
1043 repo = hg.peer(ui, opts, repopath)
1045 if not repo.capable('getbundle'):
1044 if not repo.capable('getbundle'):
1046 raise error.Abort("getbundle() not supported by target repository")
1045 raise error.Abort("getbundle() not supported by target repository")
1047 args = {}
1046 args = {}
1048 if common:
1047 if common:
1049 args[r'common'] = [bin(s) for s in common]
1048 args[r'common'] = [bin(s) for s in common]
1050 if head:
1049 if head:
1051 args[r'heads'] = [bin(s) for s in head]
1050 args[r'heads'] = [bin(s) for s in head]
1052 # TODO: get desired bundlecaps from command line.
1051 # TODO: get desired bundlecaps from command line.
1053 args[r'bundlecaps'] = None
1052 args[r'bundlecaps'] = None
1054 bundle = repo.getbundle('debug', **args)
1053 bundle = repo.getbundle('debug', **args)
1055
1054
1056 bundletype = opts.get('type', 'bzip2').lower()
1055 bundletype = opts.get('type', 'bzip2').lower()
1057 btypes = {'none': 'HG10UN',
1056 btypes = {'none': 'HG10UN',
1058 'bzip2': 'HG10BZ',
1057 'bzip2': 'HG10BZ',
1059 'gzip': 'HG10GZ',
1058 'gzip': 'HG10GZ',
1060 'bundle2': 'HG20'}
1059 'bundle2': 'HG20'}
1061 bundletype = btypes.get(bundletype)
1060 bundletype = btypes.get(bundletype)
1062 if bundletype not in bundle2.bundletypes:
1061 if bundletype not in bundle2.bundletypes:
1063 raise error.Abort(_('unknown bundle type specified with --type'))
1062 raise error.Abort(_('unknown bundle type specified with --type'))
1064 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1063 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1065
1064
1066 @command('debugignore', [], '[FILE]')
1065 @command('debugignore', [], '[FILE]')
1067 def debugignore(ui, repo, *files, **opts):
1066 def debugignore(ui, repo, *files, **opts):
1068 """display the combined ignore pattern and information about ignored files
1067 """display the combined ignore pattern and information about ignored files
1069
1068
1070 With no argument display the combined ignore pattern.
1069 With no argument display the combined ignore pattern.
1071
1070
1072 Given space separated file names, shows if the given file is ignored and,
1071 Given space separated file names, shows if the given file is ignored and,
1073 if so, shows the ignore rule (file and line number) that matched it.
1072 if so, shows the ignore rule (file and line number) that matched it.
1074 """
1073 """
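# Example usage (illustrative; the file names are hypothetical):
#   hg debugignore                   # dump the combined ignore pattern
#   hg debugignore build/ foo.pyc    # report whether each path is ignored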
1075 ignore = repo.dirstate._ignore
1074 ignore = repo.dirstate._ignore
1076 if not files:
1075 if not files:
1077 # Show all the patterns
1076 # Show all the patterns
1078 ui.write("%s\n" % pycompat.byterepr(ignore))
1077 ui.write("%s\n" % pycompat.byterepr(ignore))
1079 else:
1078 else:
1080 m = scmutil.match(repo[None], pats=files)
1079 m = scmutil.match(repo[None], pats=files)
1081 for f in m.files():
1080 for f in m.files():
1082 nf = util.normpath(f)
1081 nf = util.normpath(f)
1083 ignored = None
1082 ignored = None
1084 ignoredata = None
1083 ignoredata = None
1085 if nf != '.':
1084 if nf != '.':
1086 if ignore(nf):
1085 if ignore(nf):
1087 ignored = nf
1086 ignored = nf
1088 ignoredata = repo.dirstate._ignorefileandline(nf)
1087 ignoredata = repo.dirstate._ignorefileandline(nf)
1089 else:
1088 else:
1090 for p in util.finddirs(nf):
1089 for p in util.finddirs(nf):
1091 if ignore(p):
1090 if ignore(p):
1092 ignored = p
1091 ignored = p
1093 ignoredata = repo.dirstate._ignorefileandline(p)
1092 ignoredata = repo.dirstate._ignorefileandline(p)
1094 break
1093 break
1095 if ignored:
1094 if ignored:
1096 if ignored == nf:
1095 if ignored == nf:
1097 ui.write(_("%s is ignored\n") % m.uipath(f))
1096 ui.write(_("%s is ignored\n") % m.uipath(f))
1098 else:
1097 else:
1099 ui.write(_("%s is ignored because of "
1098 ui.write(_("%s is ignored because of "
1100 "containing folder %s\n")
1099 "containing folder %s\n")
1101 % (m.uipath(f), ignored))
1100 % (m.uipath(f), ignored))
1102 ignorefile, lineno, line = ignoredata
1101 ignorefile, lineno, line = ignoredata
1103 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1102 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1104 % (ignorefile, lineno, line))
1103 % (ignorefile, lineno, line))
1105 else:
1104 else:
1106 ui.write(_("%s is not ignored\n") % m.uipath(f))
1105 ui.write(_("%s is not ignored\n") % m.uipath(f))
1107
1106
1108 @command('debugindex', cmdutil.debugrevlogopts +
1107 @command('debugindex', cmdutil.debugrevlogopts +
1109 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
1108 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
1110 _('[-f FORMAT] -c|-m|FILE'),
1109 _('[-f FORMAT] -c|-m|FILE'),
1111 optionalrepo=True)
1110 optionalrepo=True)
1112 def debugindex(ui, repo, file_=None, **opts):
1111 def debugindex(ui, repo, file_=None, **opts):
1113 """dump the contents of an index file"""
1112 """dump the contents of an index file"""
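# Example usage (illustrative), per the -c|-m|FILE synopsis above:
#   hg debugindex -c             # changelog index, format 0
#   hg debugindex --format 1 -m  # manifest index, format 1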
1114 opts = pycompat.byteskwargs(opts)
1113 opts = pycompat.byteskwargs(opts)
1115 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
1114 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
1116 format = opts.get('format', 0)
1115 format = opts.get('format', 0)
1117 if format not in (0, 1):
1116 if format not in (0, 1):
1118 raise error.Abort(_("unknown format %d") % format)
1117 raise error.Abort(_("unknown format %d") % format)
1119
1118
1120 if ui.debugflag:
1119 if ui.debugflag:
1121 shortfn = hex
1120 shortfn = hex
1122 else:
1121 else:
1123 shortfn = short
1122 shortfn = short
1124
1123
1125 # There might not be anything in r, so have a sane default
1124 # There might not be anything in r, so have a sane default
1126 idlen = 12
1125 idlen = 12
1127 for i in r:
1126 for i in r:
1128 idlen = len(shortfn(r.node(i)))
1127 idlen = len(shortfn(r.node(i)))
1129 break
1128 break
1130
1129
1131 if format == 0:
1130 if format == 0:
1132 if ui.verbose:
1131 if ui.verbose:
1133 ui.write((" rev offset length linkrev"
1132 ui.write((" rev offset length linkrev"
1134 " %s %s p2\n") % ("nodeid".ljust(idlen),
1133 " %s %s p2\n") % ("nodeid".ljust(idlen),
1135 "p1".ljust(idlen)))
1134 "p1".ljust(idlen)))
1136 else:
1135 else:
1137 ui.write((" rev linkrev %s %s p2\n") % (
1136 ui.write((" rev linkrev %s %s p2\n") % (
1138 "nodeid".ljust(idlen), "p1".ljust(idlen)))
1137 "nodeid".ljust(idlen), "p1".ljust(idlen)))
1139 elif format == 1:
1138 elif format == 1:
1140 if ui.verbose:
1139 if ui.verbose:
1141 ui.write((" rev flag offset length size link p1"
1140 ui.write((" rev flag offset length size link p1"
1142 " p2 %s\n") % "nodeid".rjust(idlen))
1141 " p2 %s\n") % "nodeid".rjust(idlen))
1143 else:
1142 else:
1144 ui.write((" rev flag size link p1 p2 %s\n") %
1143 ui.write((" rev flag size link p1 p2 %s\n") %
1145 "nodeid".rjust(idlen))
1144 "nodeid".rjust(idlen))
1146
1145
1147 for i in r:
1146 for i in r:
1148 node = r.node(i)
1147 node = r.node(i)
1149 if format == 0:
1148 if format == 0:
1150 try:
1149 try:
1151 pp = r.parents(node)
1150 pp = r.parents(node)
1152 except Exception:
1151 except Exception:
1153 pp = [nullid, nullid]
1152 pp = [nullid, nullid]
1154 if ui.verbose:
1153 if ui.verbose:
1155 ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
1154 ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
1156 i, r.start(i), r.length(i), r.linkrev(i),
1155 i, r.start(i), r.length(i), r.linkrev(i),
1157 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
1156 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
1158 else:
1157 else:
1159 ui.write("% 6d % 7d %s %s %s\n" % (
1158 ui.write("% 6d % 7d %s %s %s\n" % (
1160 i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
1159 i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
1161 shortfn(pp[1])))
1160 shortfn(pp[1])))
1162 elif format == 1:
1161 elif format == 1:
1163 pr = r.parentrevs(i)
1162 pr = r.parentrevs(i)
1164 if ui.verbose:
1163 if ui.verbose:
1165 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
1164 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
1166 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
1165 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
1167 r.linkrev(i), pr[0], pr[1], shortfn(node)))
1166 r.linkrev(i), pr[0], pr[1], shortfn(node)))
1168 else:
1167 else:
1169 ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
1168 ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
1170 i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
1169 i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
1171 shortfn(node)))
1170 shortfn(node)))
1172
1171
1173 @command('debugindexdot', cmdutil.debugrevlogopts,
1172 @command('debugindexdot', cmdutil.debugrevlogopts,
1174 _('-c|-m|FILE'), optionalrepo=True)
1173 _('-c|-m|FILE'), optionalrepo=True)
1175 def debugindexdot(ui, repo, file_=None, **opts):
1174 def debugindexdot(ui, repo, file_=None, **opts):
1176 """dump an index DAG as a graphviz dot file"""
1175 """dump an index DAG as a graphviz dot file"""
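# Example usage (illustrative; piping the output to Graphviz is an assumption
# about the caller's environment, not something this command does itself):
#   hg debugindexdot -c > changelog.dot
#   dot -Tpng changelog.dot -o changelog.png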
1177 opts = pycompat.byteskwargs(opts)
1176 opts = pycompat.byteskwargs(opts)
1178 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
1177 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
1179 ui.write(("digraph G {\n"))
1178 ui.write(("digraph G {\n"))
1180 for i in r:
1179 for i in r:
1181 node = r.node(i)
1180 node = r.node(i)
1182 pp = r.parents(node)
1181 pp = r.parents(node)
1183 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1182 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1184 if pp[1] != nullid:
1183 if pp[1] != nullid:
1185 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1184 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1186 ui.write("}\n")
1185 ui.write("}\n")
1187
1186
1188 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1187 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1189 def debuginstall(ui, **opts):
1188 def debuginstall(ui, **opts):
1190 '''test Mercurial installation
1189 '''test Mercurial installation
1191
1190
1192 Returns 0 on success.
1191 Returns 0 on success.
1193 '''
1192 '''
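# Example usage (illustrative):
#   hg debuginstall              # human-readable report
#   hg debuginstall -T json      # templated output via formatteropts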
1194 opts = pycompat.byteskwargs(opts)
1193 opts = pycompat.byteskwargs(opts)
1195
1194
1196 def writetemp(contents):
1195 def writetemp(contents):
1197 (fd, name) = pycompat.mkstemp(prefix="hg-debuginstall-")
1196 (fd, name) = pycompat.mkstemp(prefix="hg-debuginstall-")
1198 f = os.fdopen(fd, r"wb")
1197 f = os.fdopen(fd, r"wb")
1199 f.write(contents)
1198 f.write(contents)
1200 f.close()
1199 f.close()
1201 return name
1200 return name
1202
1201
1203 problems = 0
1202 problems = 0
1204
1203
1205 fm = ui.formatter('debuginstall', opts)
1204 fm = ui.formatter('debuginstall', opts)
1206 fm.startitem()
1205 fm.startitem()
1207
1206
1208 # encoding
1207 # encoding
1209 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1208 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1210 err = None
1209 err = None
1211 try:
1210 try:
1212 codecs.lookup(pycompat.sysstr(encoding.encoding))
1211 codecs.lookup(pycompat.sysstr(encoding.encoding))
1213 except LookupError as inst:
1212 except LookupError as inst:
1214 err = stringutil.forcebytestr(inst)
1213 err = stringutil.forcebytestr(inst)
1215 problems += 1
1214 problems += 1
1216 fm.condwrite(err, 'encodingerror', _(" %s\n"
1215 fm.condwrite(err, 'encodingerror', _(" %s\n"
1217 " (check that your locale is properly set)\n"), err)
1216 " (check that your locale is properly set)\n"), err)
1218
1217
1219 # Python
1218 # Python
1220 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1219 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1221 pycompat.sysexecutable)
1220 pycompat.sysexecutable)
1222 fm.write('pythonver', _("checking Python version (%s)\n"),
1221 fm.write('pythonver', _("checking Python version (%s)\n"),
1223 ("%d.%d.%d" % sys.version_info[:3]))
1222 ("%d.%d.%d" % sys.version_info[:3]))
1224 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1223 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1225 os.path.dirname(pycompat.fsencode(os.__file__)))
1224 os.path.dirname(pycompat.fsencode(os.__file__)))
1226
1225
1227 security = set(sslutil.supportedprotocols)
1226 security = set(sslutil.supportedprotocols)
1228 if sslutil.hassni:
1227 if sslutil.hassni:
1229 security.add('sni')
1228 security.add('sni')
1230
1229
1231 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1230 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1232 fm.formatlist(sorted(security), name='protocol',
1231 fm.formatlist(sorted(security), name='protocol',
1233 fmt='%s', sep=','))
1232 fmt='%s', sep=','))
1234
1233
1235 # These are warnings, not errors. So don't increment problem count. This
1234 # These are warnings, not errors. So don't increment problem count. This
1236 # may change in the future.
1235 # may change in the future.
1237 if 'tls1.2' not in security:
1236 if 'tls1.2' not in security:
1238 fm.plain(_(' TLS 1.2 not supported by Python install; '
1237 fm.plain(_(' TLS 1.2 not supported by Python install; '
1239 'network connections lack modern security\n'))
1238 'network connections lack modern security\n'))
1240 if 'sni' not in security:
1239 if 'sni' not in security:
1241 fm.plain(_(' SNI not supported by Python install; may have '
1240 fm.plain(_(' SNI not supported by Python install; may have '
1242 'connectivity issues with some servers\n'))
1241 'connectivity issues with some servers\n'))
1243
1242
1244 # TODO print CA cert info
1243 # TODO print CA cert info
1245
1244
1246 # hg version
1245 # hg version
1247 hgver = util.version()
1246 hgver = util.version()
1248 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1247 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1249 hgver.split('+')[0])
1248 hgver.split('+')[0])
1250 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1249 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1251 '+'.join(hgver.split('+')[1:]))
1250 '+'.join(hgver.split('+')[1:]))
1252
1251
1253 # compiled modules
1252 # compiled modules
1254 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1253 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1255 policy.policy)
1254 policy.policy)
1256 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1255 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1257 os.path.dirname(pycompat.fsencode(__file__)))
1256 os.path.dirname(pycompat.fsencode(__file__)))
1258
1257
1259 if policy.policy in ('c', 'allow'):
1258 if policy.policy in ('c', 'allow'):
1260 err = None
1259 err = None
1261 try:
1260 try:
1262 from .cext import (
1261 from .cext import (
1263 base85,
1262 base85,
1264 bdiff,
1263 bdiff,
1265 mpatch,
1264 mpatch,
1266 osutil,
1265 osutil,
1267 )
1266 )
1268 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1267 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1269 except Exception as inst:
1268 except Exception as inst:
1270 err = stringutil.forcebytestr(inst)
1269 err = stringutil.forcebytestr(inst)
1271 problems += 1
1270 problems += 1
1272 fm.condwrite(err, 'extensionserror', " %s\n", err)
1271 fm.condwrite(err, 'extensionserror', " %s\n", err)
1273
1272
1274 compengines = util.compengines._engines.values()
1273 compengines = util.compengines._engines.values()
1275 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1274 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1276 fm.formatlist(sorted(e.name() for e in compengines),
1275 fm.formatlist(sorted(e.name() for e in compengines),
1277 name='compengine', fmt='%s', sep=', '))
1276 name='compengine', fmt='%s', sep=', '))
1278 fm.write('compenginesavail', _('checking available compression engines '
1277 fm.write('compenginesavail', _('checking available compression engines '
1279 '(%s)\n'),
1278 '(%s)\n'),
1280 fm.formatlist(sorted(e.name() for e in compengines
1279 fm.formatlist(sorted(e.name() for e in compengines
1281 if e.available()),
1280 if e.available()),
1282 name='compengine', fmt='%s', sep=', '))
1281 name='compengine', fmt='%s', sep=', '))
1283 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1282 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1284 fm.write('compenginesserver', _('checking available compression engines '
1283 fm.write('compenginesserver', _('checking available compression engines '
1285 'for wire protocol (%s)\n'),
1284 'for wire protocol (%s)\n'),
1286 fm.formatlist([e.name() for e in wirecompengines
1285 fm.formatlist([e.name() for e in wirecompengines
1287 if e.wireprotosupport()],
1286 if e.wireprotosupport()],
1288 name='compengine', fmt='%s', sep=', '))
1287 name='compengine', fmt='%s', sep=', '))
1289 re2 = 'missing'
1288 re2 = 'missing'
1290 if util._re2:
1289 if util._re2:
1291 re2 = 'available'
1290 re2 = 'available'
1292 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1291 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1293 fm.data(re2=bool(util._re2))
1292 fm.data(re2=bool(util._re2))
1294
1293
1295 # templates
1294 # templates
1296 p = templater.templatepaths()
1295 p = templater.templatepaths()
1297 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1296 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1298 fm.condwrite(not p, '', _(" no template directories found\n"))
1297 fm.condwrite(not p, '', _(" no template directories found\n"))
1299 if p:
1298 if p:
1300 m = templater.templatepath("map-cmdline.default")
1299 m = templater.templatepath("map-cmdline.default")
1301 if m:
1300 if m:
1302 # template found, check if it is working
1301 # template found, check if it is working
1303 err = None
1302 err = None
1304 try:
1303 try:
1305 templater.templater.frommapfile(m)
1304 templater.templater.frommapfile(m)
1306 except Exception as inst:
1305 except Exception as inst:
1307 err = stringutil.forcebytestr(inst)
1306 err = stringutil.forcebytestr(inst)
1308 p = None
1307 p = None
1309 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1308 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1310 else:
1309 else:
1311 p = None
1310 p = None
1312 fm.condwrite(p, 'defaulttemplate',
1311 fm.condwrite(p, 'defaulttemplate',
1313 _("checking default template (%s)\n"), m)
1312 _("checking default template (%s)\n"), m)
1314 fm.condwrite(not m, 'defaulttemplatenotfound',
1313 fm.condwrite(not m, 'defaulttemplatenotfound',
1315 _(" template '%s' not found\n"), "default")
1314 _(" template '%s' not found\n"), "default")
1316 if not p:
1315 if not p:
1317 problems += 1
1316 problems += 1
1318 fm.condwrite(not p, '',
1317 fm.condwrite(not p, '',
1319 _(" (templates seem to have been installed incorrectly)\n"))
1318 _(" (templates seem to have been installed incorrectly)\n"))
1320
1319
1321 # editor
1320 # editor
1322 editor = ui.geteditor()
1321 editor = ui.geteditor()
1323 editor = util.expandpath(editor)
1322 editor = util.expandpath(editor)
1324 editorbin = procutil.shellsplit(editor)[0]
1323 editorbin = procutil.shellsplit(editor)[0]
1325 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1324 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1326 cmdpath = procutil.findexe(editorbin)
1325 cmdpath = procutil.findexe(editorbin)
1327 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1326 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1328 _(" No commit editor set and can't find %s in PATH\n"
1327 _(" No commit editor set and can't find %s in PATH\n"
1329 " (specify a commit editor in your configuration"
1328 " (specify a commit editor in your configuration"
1330 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1329 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1331 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1330 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1332 _(" Can't find editor '%s' in PATH\n"
1331 _(" Can't find editor '%s' in PATH\n"
1333 " (specify a commit editor in your configuration"
1332 " (specify a commit editor in your configuration"
1334 " file)\n"), not cmdpath and editorbin)
1333 " file)\n"), not cmdpath and editorbin)
1335 if not cmdpath and editor != 'vi':
1334 if not cmdpath and editor != 'vi':
1336 problems += 1
1335 problems += 1
1337
1336
1338 # check username
1337 # check username
1339 username = None
1338 username = None
1340 err = None
1339 err = None
1341 try:
1340 try:
1342 username = ui.username()
1341 username = ui.username()
1343 except error.Abort as e:
1342 except error.Abort as e:
1344 err = stringutil.forcebytestr(e)
1343 err = stringutil.forcebytestr(e)
1345 problems += 1
1344 problems += 1
1346
1345
1347 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1346 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1348 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1347 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1349 " (specify a username in your configuration file)\n"), err)
1348 " (specify a username in your configuration file)\n"), err)
1350
1349
1351 fm.condwrite(not problems, '',
1350 fm.condwrite(not problems, '',
1352 _("no problems detected\n"))
1351 _("no problems detected\n"))
1353 if not problems:
1352 if not problems:
1354 fm.data(problems=problems)
1353 fm.data(problems=problems)
1355 fm.condwrite(problems, 'problems',
1354 fm.condwrite(problems, 'problems',
1356 _("%d problems detected,"
1355 _("%d problems detected,"
1357 " please check your install!\n"), problems)
1356 " please check your install!\n"), problems)
1358 fm.end()
1357 fm.end()
1359
1358
1360 return problems
1359 return problems
1361
1360
1362 @command('debugknown', [], _('REPO ID...'), norepo=True)
1361 @command('debugknown', [], _('REPO ID...'), norepo=True)
1363 def debugknown(ui, repopath, *ids, **opts):
1362 def debugknown(ui, repopath, *ids, **opts):
1364 """test whether node ids are known to a repo
1363 """test whether node ids are known to a repo
1365
1364
1366 Every ID must be a full-length hex node id string. Returns a list of 0s
1365 Every ID must be a full-length hex node id string. Returns a list of 0s
1367 and 1s indicating unknown/known.
1366 and 1s indicating unknown/known.
1368 """
1367 """
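# Example usage (illustrative; the repo URL and the full-hex node ids are
# hypothetical placeholders):
#   hg debugknown ssh://example.com/repo $NODE1 $NODE2
# prints e.g. "10", meaning the first id is known and the second is not.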
1369 opts = pycompat.byteskwargs(opts)
1368 opts = pycompat.byteskwargs(opts)
1370 repo = hg.peer(ui, opts, repopath)
1369 repo = hg.peer(ui, opts, repopath)
1371 if not repo.capable('known'):
1370 if not repo.capable('known'):
1372 raise error.Abort("known() not supported by target repository")
1371 raise error.Abort("known() not supported by target repository")
1373 flags = repo.known([bin(s) for s in ids])
1372 flags = repo.known([bin(s) for s in ids])
1374 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1373 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1375
1374
1376 @command('debuglabelcomplete', [], _('LABEL...'))
1375 @command('debuglabelcomplete', [], _('LABEL...'))
1377 def debuglabelcomplete(ui, repo, *args):
1376 def debuglabelcomplete(ui, repo, *args):
1378 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1377 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1379 debugnamecomplete(ui, repo, *args)
1378 debugnamecomplete(ui, repo, *args)
1380
1379
1381 @command('debuglocks',
1380 @command('debuglocks',
1382 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1381 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1383 ('W', 'force-wlock', None,
1382 ('W', 'force-wlock', None,
1384 _('free the working state lock (DANGEROUS)')),
1383 _('free the working state lock (DANGEROUS)')),
1385 ('s', 'set-lock', None, _('set the store lock until stopped')),
1384 ('s', 'set-lock', None, _('set the store lock until stopped')),
1386 ('S', 'set-wlock', None,
1385 ('S', 'set-wlock', None,
1387 _('set the working state lock until stopped'))],
1386 _('set the working state lock until stopped'))],
1388 _('[OPTION]...'))
1387 _('[OPTION]...'))
1389 def debuglocks(ui, repo, **opts):
1388 def debuglocks(ui, repo, **opts):
1390 """show or modify state of locks
1389 """show or modify state of locks
1391
1390
1392 By default, this command will show which locks are held. This
1391 By default, this command will show which locks are held. This
1393 includes the user and process holding the lock, the amount of time
1392 includes the user and process holding the lock, the amount of time
1394 the lock has been held, and the machine name where the process is
1393 the lock has been held, and the machine name where the process is
1395 running if it's not local.
1394 running if it's not local.
1396
1395
1397 Locks protect the integrity of Mercurial's data, so should be
1396 Locks protect the integrity of Mercurial's data, so should be
1398 treated with care. System crashes or other interruptions may cause
1397 treated with care. System crashes or other interruptions may cause
1399 locks to not be properly released, though Mercurial will usually
1398 locks to not be properly released, though Mercurial will usually
1400 detect and remove such stale locks automatically.
1399 detect and remove such stale locks automatically.
1401
1400
1402 However, detecting stale locks may not always be possible (for
1401 However, detecting stale locks may not always be possible (for
1403 instance, on a shared filesystem). Removing locks may also be
1402 instance, on a shared filesystem). Removing locks may also be
1404 blocked by filesystem permissions.
1403 blocked by filesystem permissions.
1405
1404
1406 Setting a lock will prevent other commands from changing the data.
1405 Setting a lock will prevent other commands from changing the data.
1407 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1406 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1408 The set locks are removed when the command exits.
1407 The set locks are removed when the command exits.
1409
1408
1410 Returns 0 if no locks are held.
1409 Returns 0 if no locks are held.
1411
1410
1412 """
1411 """
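# Example usage (illustrative):
#   hg debuglocks                # report lock/wlock holders, if any
#   hg debuglocks --set-wlock    # hold the working state lock until interrupted
#   hg debuglocks -L             # free a stale store lock (DANGEROUS)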
1413
1412
1414 if opts.get(r'force_lock'):
1413 if opts.get(r'force_lock'):
1415 repo.svfs.unlink('lock')
1414 repo.svfs.unlink('lock')
1416 if opts.get(r'force_wlock'):
1415 if opts.get(r'force_wlock'):
1417 repo.vfs.unlink('wlock')
1416 repo.vfs.unlink('wlock')
1418 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1417 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1419 return 0
1418 return 0
1420
1419
1421 locks = []
1420 locks = []
1422 try:
1421 try:
1423 if opts.get(r'set_wlock'):
1422 if opts.get(r'set_wlock'):
1424 try:
1423 try:
1425 locks.append(repo.wlock(False))
1424 locks.append(repo.wlock(False))
1426 except error.LockHeld:
1425 except error.LockHeld:
1427 raise error.Abort(_('wlock is already held'))
1426 raise error.Abort(_('wlock is already held'))
1428 if opts.get(r'set_lock'):
1427 if opts.get(r'set_lock'):
1429 try:
1428 try:
1430 locks.append(repo.lock(False))
1429 locks.append(repo.lock(False))
1431 except error.LockHeld:
1430 except error.LockHeld:
1432 raise error.Abort(_('lock is already held'))
1431 raise error.Abort(_('lock is already held'))
1433 if len(locks):
1432 if len(locks):
1434 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1433 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1435 return 0
1434 return 0
1436 finally:
1435 finally:
1437 release(*locks)
1436 release(*locks)
1438
1437
1439 now = time.time()
1438 now = time.time()
1440 held = 0
1439 held = 0
1441
1440
1442 def report(vfs, name, method):
1441 def report(vfs, name, method):
1443 # this causes stale locks to get reaped for more accurate reporting
1442 # this causes stale locks to get reaped for more accurate reporting
1444 try:
1443 try:
1445 l = method(False)
1444 l = method(False)
1446 except error.LockHeld:
1445 except error.LockHeld:
1447 l = None
1446 l = None
1448
1447
1449 if l:
1448 if l:
1450 l.release()
1449 l.release()
1451 else:
1450 else:
1452 try:
1451 try:
1453 st = vfs.lstat(name)
1452 st = vfs.lstat(name)
1454 age = now - st[stat.ST_MTIME]
1453 age = now - st[stat.ST_MTIME]
1455 user = util.username(st.st_uid)
1454 user = util.username(st.st_uid)
1456 locker = vfs.readlock(name)
1455 locker = vfs.readlock(name)
1457 if ":" in locker:
1456 if ":" in locker:
1458 host, pid = locker.split(':')
1457 host, pid = locker.split(':')
1459 if host == socket.gethostname():
1458 if host == socket.gethostname():
1460 locker = 'user %s, process %s' % (user, pid)
1459 locker = 'user %s, process %s' % (user, pid)
1461 else:
1460 else:
1462 locker = 'user %s, process %s, host %s' \
1461 locker = 'user %s, process %s, host %s' \
1463 % (user, pid, host)
1462 % (user, pid, host)
1464 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1463 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1465 return 1
1464 return 1
1466 except OSError as e:
1465 except OSError as e:
1467 if e.errno != errno.ENOENT:
1466 if e.errno != errno.ENOENT:
1468 raise
1467 raise
1469
1468
1470 ui.write(("%-6s free\n") % (name + ":"))
1469 ui.write(("%-6s free\n") % (name + ":"))
1471 return 0
1470 return 0
1472
1471
1473 held += report(repo.svfs, "lock", repo.lock)
1472 held += report(repo.svfs, "lock", repo.lock)
1474 held += report(repo.vfs, "wlock", repo.wlock)
1473 held += report(repo.vfs, "wlock", repo.wlock)
1475
1474
1476 return held
1475 return held
1477
1476
1478 @command('debugmanifestfulltextcache', [
1477 @command('debugmanifestfulltextcache', [
1479 ('', 'clear', False, _('clear the cache')),
1478 ('', 'clear', False, _('clear the cache')),
1480 ('a', 'add', '', _('add the given manifest node to the cache'),
1479 ('a', 'add', '', _('add the given manifest node to the cache'),
1481 _('NODE'))
1480 _('NODE'))
1482 ], '')
1481 ], '')
1483 def debugmanifestfulltextcache(ui, repo, add=None, **opts):
1482 def debugmanifestfulltextcache(ui, repo, add=None, **opts):
1484 """show, clear or amend the contents of the manifest fulltext cache"""
1483 """show, clear or amend the contents of the manifest fulltext cache"""
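# Example usage (illustrative; NODE is a hypothetical manifest node id):
#   hg debugmanifestfulltextcache            # show cache contents
#   hg debugmanifestfulltextcache --clear
#   hg debugmanifestfulltextcache --add NODE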
1485 with repo.lock():
1484 with repo.lock():
1486 r = repo.manifestlog._revlog
1485 r = repo.manifestlog._revlog
1487 try:
1486 try:
1488 cache = r._fulltextcache
1487 cache = r._fulltextcache
1489 except AttributeError:
1488 except AttributeError:
1490 ui.warn(_(
1489 ui.warn(_(
1491 "Current revlog implementation doesn't appear to have a "
1490 "Current revlog implementation doesn't appear to have a "
1492 'manifest fulltext cache\n'))
1491 'manifest fulltext cache\n'))
1493 return
1492 return
1494
1493
1495 if opts.get(r'clear'):
1494 if opts.get(r'clear'):
1496 cache.clear()
1495 cache.clear()
1497
1496
1498 if add:
1497 if add:
1499 try:
1498 try:
1500 manifest = repo.manifestlog[r.lookup(add)]
1499 manifest = repo.manifestlog[r.lookup(add)]
1501 except error.LookupError as e:
1500 except error.LookupError as e:
1502 raise error.Abort(e, hint="Check your manifest node id")
1501 raise error.Abort(e, hint="Check your manifest node id")
1503 manifest.read() # stores revision in cache too
1502 manifest.read() # stores revision in cache too
1504
1503
1505 if not len(cache):
1504 if not len(cache):
1506 ui.write(_('Cache empty'))
1505 ui.write(_('Cache empty'))
1507 else:
1506 else:
1508 ui.write(
1507 ui.write(
1509 _('Cache contains %d manifest entries, in order of most to '
1508 _('Cache contains %d manifest entries, in order of most to '
1510 'least recent:\n') % (len(cache),))
1509 'least recent:\n') % (len(cache),))
1511 totalsize = 0
1510 totalsize = 0
1512 for nodeid in cache:
1511 for nodeid in cache:
1513 # Use cache.get to not update the LRU order
1512 # Use cache.get to not update the LRU order
1514 data = cache.get(nodeid)
1513 data = cache.get(nodeid)
1515 size = len(data)
1514 size = len(data)
1516 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1515 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1517 ui.write(_('id: %s, size %s\n') % (
1516 ui.write(_('id: %s, size %s\n') % (
1518 hex(nodeid), util.bytecount(size)))
1517 hex(nodeid), util.bytecount(size)))
1519 ondisk = cache._opener.stat('manifestfulltextcache').st_size
1518 ondisk = cache._opener.stat('manifestfulltextcache').st_size
1520 ui.write(
1519 ui.write(
1521 _('Total cache data size %s, on-disk %s\n') % (
1520 _('Total cache data size %s, on-disk %s\n') % (
1522 util.bytecount(totalsize), util.bytecount(ondisk))
1521 util.bytecount(totalsize), util.bytecount(ondisk))
1523 )
1522 )
1524
1523
1525 @command('debugmergestate', [], '')
1524 @command('debugmergestate', [], '')
1526 def debugmergestate(ui, repo, *args):
1525 def debugmergestate(ui, repo, *args):
1527 """print merge state
1526 """print merge state
1528
1527
1529 Use --verbose to print out information about whether v1 or v2 merge state
1528 Use --verbose to print out information about whether v1 or v2 merge state
1530 was chosen."""
1529 was chosen."""
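# Example usage (illustrative): run from a repository with an unresolved
# merge to dump the recorded merge state.
#   hg debugmergestate
#   hg debugmergestate --verbose   # also report whether v1 or v2 was chosen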
1531 def _hashornull(h):
1530 def _hashornull(h):
1532 if h == nullhex:
1531 if h == nullhex:
1533 return 'null'
1532 return 'null'
1534 else:
1533 else:
1535 return h
1534 return h
1536
1535
1537 def printrecords(version):
1536 def printrecords(version):
1538 ui.write(('* version %d records\n') % version)
1537 ui.write(('* version %d records\n') % version)
1539 if version == 1:
1538 if version == 1:
1540 records = v1records
1539 records = v1records
1541 else:
1540 else:
1542 records = v2records
1541 records = v2records
1543
1542
1544 for rtype, record in records:
1543 for rtype, record in records:
1545 # pretty print some record types
1544 # pretty print some record types
1546 if rtype == 'L':
1545 if rtype == 'L':
1547 ui.write(('local: %s\n') % record)
1546 ui.write(('local: %s\n') % record)
1548 elif rtype == 'O':
1547 elif rtype == 'O':
1549 ui.write(('other: %s\n') % record)
1548 ui.write(('other: %s\n') % record)
1550 elif rtype == 'm':
1549 elif rtype == 'm':
1551 driver, mdstate = record.split('\0', 1)
1550 driver, mdstate = record.split('\0', 1)
1552 ui.write(('merge driver: %s (state "%s")\n')
1551 ui.write(('merge driver: %s (state "%s")\n')
1553 % (driver, mdstate))
1552 % (driver, mdstate))
1554 elif rtype in 'FDC':
1553 elif rtype in 'FDC':
1555 r = record.split('\0')
1554 r = record.split('\0')
1556 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1555 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1557 if version == 1:
1556 if version == 1:
1558 onode = 'not stored in v1 format'
1557 onode = 'not stored in v1 format'
1559 flags = r[7]
1558 flags = r[7]
1560 else:
1559 else:
1561 onode, flags = r[7:9]
1560 onode, flags = r[7:9]
1562 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1561 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1563 % (f, rtype, state, _hashornull(hash)))
1562 % (f, rtype, state, _hashornull(hash)))
1564 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1563 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1565 ui.write((' ancestor path: %s (node %s)\n')
1564 ui.write((' ancestor path: %s (node %s)\n')
1566 % (afile, _hashornull(anode)))
1565 % (afile, _hashornull(anode)))
1567 ui.write((' other path: %s (node %s)\n')
1566 ui.write((' other path: %s (node %s)\n')
1568 % (ofile, _hashornull(onode)))
1567 % (ofile, _hashornull(onode)))
1569 elif rtype == 'f':
1568 elif rtype == 'f':
1570 filename, rawextras = record.split('\0', 1)
1569 filename, rawextras = record.split('\0', 1)
1571 extras = rawextras.split('\0')
1570 extras = rawextras.split('\0')
1572 i = 0
1571 i = 0
1573 extrastrings = []
1572 extrastrings = []
1574 while i < len(extras):
1573 while i < len(extras):
1575 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1574 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1576 i += 2
1575 i += 2
1577
1576
1578 ui.write(('file extras: %s (%s)\n')
1577 ui.write(('file extras: %s (%s)\n')
1579 % (filename, ', '.join(extrastrings)))
1578 % (filename, ', '.join(extrastrings)))
1580 elif rtype == 'l':
1579 elif rtype == 'l':
1581 labels = record.split('\0', 2)
1580 labels = record.split('\0', 2)
1582 labels = [l for l in labels if len(l) > 0]
1581 labels = [l for l in labels if len(l) > 0]
1583 ui.write(('labels:\n'))
1582 ui.write(('labels:\n'))
1584 ui.write((' local: %s\n' % labels[0]))
1583 ui.write((' local: %s\n' % labels[0]))
1585 ui.write((' other: %s\n' % labels[1]))
1584 ui.write((' other: %s\n' % labels[1]))
1586 if len(labels) > 2:
1585 if len(labels) > 2:
1587 ui.write((' base: %s\n' % labels[2]))
1586 ui.write((' base: %s\n' % labels[2]))
1588 else:
1587 else:
1589 ui.write(('unrecognized entry: %s\t%s\n')
1588 ui.write(('unrecognized entry: %s\t%s\n')
1590 % (rtype, record.replace('\0', '\t')))
1589 % (rtype, record.replace('\0', '\t')))
1591
1590
1592 # Avoid mergestate.read() since it may raise an exception for unsupported
1591 # Avoid mergestate.read() since it may raise an exception for unsupported
1593 # merge state records. We shouldn't be doing this, but this is OK since this
1592 # merge state records. We shouldn't be doing this, but this is OK since this
1594 # command is pretty low-level.
1593 # command is pretty low-level.
1595 ms = mergemod.mergestate(repo)
1594 ms = mergemod.mergestate(repo)
1596
1595
1597 # sort so that reasonable information is on top
1596 # sort so that reasonable information is on top
1598 v1records = ms._readrecordsv1()
1597 v1records = ms._readrecordsv1()
1599 v2records = ms._readrecordsv2()
1598 v2records = ms._readrecordsv2()
1600 order = 'LOml'
1599 order = 'LOml'
1601 def key(r):
1600 def key(r):
1602 idx = order.find(r[0])
1601 idx = order.find(r[0])
1603 if idx == -1:
1602 if idx == -1:
1604 return (1, r[1])
1603 return (1, r[1])
1605 else:
1604 else:
1606 return (0, idx)
1605 return (0, idx)
1607 v1records.sort(key=key)
1606 v1records.sort(key=key)
1608 v2records.sort(key=key)
1607 v2records.sort(key=key)
1609
1608
1610 if not v1records and not v2records:
1609 if not v1records and not v2records:
1611 ui.write(('no merge state found\n'))
1610 ui.write(('no merge state found\n'))
1612 elif not v2records:
1611 elif not v2records:
1613 ui.note(('no version 2 merge state\n'))
1612 ui.note(('no version 2 merge state\n'))
1614 printrecords(1)
1613 printrecords(1)
1615 elif ms._v1v2match(v1records, v2records):
1614 elif ms._v1v2match(v1records, v2records):
1616 ui.note(('v1 and v2 states match: using v2\n'))
1615 ui.note(('v1 and v2 states match: using v2\n'))
1617 printrecords(2)
1616 printrecords(2)
1618 else:
1617 else:
1619 ui.note(('v1 and v2 states mismatch: using v1\n'))
1618 ui.note(('v1 and v2 states mismatch: using v1\n'))
1620 printrecords(1)
1619 printrecords(1)
1621 if ui.verbose:
1620 if ui.verbose:
1622 printrecords(2)
1621 printrecords(2)
1623
1622
1624 @command('debugnamecomplete', [], _('NAME...'))
1623 @command('debugnamecomplete', [], _('NAME...'))
1625 def debugnamecomplete(ui, repo, *args):
1624 def debugnamecomplete(ui, repo, *args):
1626 '''complete "names" - tags, open branch names, bookmark names'''
1625 '''complete "names" - tags, open branch names, bookmark names'''
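# Example usage (illustrative; the prefix is hypothetical):
#   hg debugnamecomplete         # list all tags, open branches and bookmarks
#   hg debugnamecomplete rel     # only names starting with "rel"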
1627
1626
1628 names = set()
1627 names = set()
1629 # since we previously only listed open branches, we will handle that
1628 # since we previously only listed open branches, we will handle that
1630 # specially (after this for loop)
1629 # specially (after this for loop)
1631 for name, ns in repo.names.iteritems():
1630 for name, ns in repo.names.iteritems():
1632 if name != 'branches':
1631 if name != 'branches':
1633 names.update(ns.listnames(repo))
1632 names.update(ns.listnames(repo))
1634 names.update(tag for (tag, heads, tip, closed)
1633 names.update(tag for (tag, heads, tip, closed)
1635 in repo.branchmap().iterbranches() if not closed)
1634 in repo.branchmap().iterbranches() if not closed)
1636 completions = set()
1635 completions = set()
1637 if not args:
1636 if not args:
1638 args = ['']
1637 args = ['']
1639 for a in args:
1638 for a in args:
1640 completions.update(n for n in names if n.startswith(a))
1639 completions.update(n for n in names if n.startswith(a))
1641 ui.write('\n'.join(sorted(completions)))
1640 ui.write('\n'.join(sorted(completions)))
1642 ui.write('\n')
1641 ui.write('\n')
1643
1642
1644 @command('debugobsolete',
1643 @command('debugobsolete',
1645 [('', 'flags', 0, _('markers flag')),
1644 [('', 'flags', 0, _('markers flag')),
1646 ('', 'record-parents', False,
1645 ('', 'record-parents', False,
1647 _('record parent information for the precursor')),
1646 _('record parent information for the precursor')),
1648 ('r', 'rev', [], _('display markers relevant to REV')),
1647 ('r', 'rev', [], _('display markers relevant to REV')),
1649 ('', 'exclusive', False, _('restrict display to markers only '
1648 ('', 'exclusive', False, _('restrict display to markers only '
1650 'relevant to REV')),
1649 'relevant to REV')),
1651 ('', 'index', False, _('display index of the marker')),
1650 ('', 'index', False, _('display index of the marker')),
1652 ('', 'delete', [], _('delete markers specified by indices')),
1651 ('', 'delete', [], _('delete markers specified by indices')),
1653 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1652 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1654 _('[OBSOLETED [REPLACEMENT ...]]'))
1653 _('[OBSOLETED [REPLACEMENT ...]]'))
1655 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1654 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1656 """create arbitrary obsolete marker
1655 """create arbitrary obsolete marker
1657
1656
1658 With no arguments, displays the list of obsolescence markers."""
1657 With no arguments, displays the list of obsolescence markers."""
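# Example usage (illustrative; PREC/SUCC stand for full-hex node ids and are
# hypothetical placeholders):
#   hg debugobsolete                 # list all markers
#   hg debugobsolete --index -r .    # markers relevant to the working parent
#   hg debugobsolete PREC SUCC       # record that PREC was rewritten as SUCC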
1659
1658
1660 opts = pycompat.byteskwargs(opts)
1659 opts = pycompat.byteskwargs(opts)
1661
1660
1662 def parsenodeid(s):
1661 def parsenodeid(s):
1663 try:
1662 try:
1664 # We do not use revsingle/revrange functions here to accept
1663 # We do not use revsingle/revrange functions here to accept
1665 # arbitrary node identifiers, possibly not present in the
1664 # arbitrary node identifiers, possibly not present in the
1666 # local repository.
1665 # local repository.
1667 n = bin(s)
1666 n = bin(s)
1668 if len(n) != len(nullid):
1667 if len(n) != len(nullid):
1669 raise TypeError()
1668 raise TypeError()
1670 return n
1669 return n
1671 except TypeError:
1670 except TypeError:
1672 raise error.Abort('changeset references must be full hexadecimal '
1671 raise error.Abort('changeset references must be full hexadecimal '
1673 'node identifiers')
1672 'node identifiers')
1674
1673
1675 if opts.get('delete'):
1674 if opts.get('delete'):
1676 indices = []
1675 indices = []
1677 for v in opts.get('delete'):
1676 for v in opts.get('delete'):
1678 try:
1677 try:
1679 indices.append(int(v))
1678 indices.append(int(v))
1680 except ValueError:
1679 except ValueError:
1681 raise error.Abort(_('invalid index value: %r') % v,
1680 raise error.Abort(_('invalid index value: %r') % v,
1682 hint=_('use integers for indices'))
1681 hint=_('use integers for indices'))
1683
1682
1684 if repo.currenttransaction():
1683 if repo.currenttransaction():
1685 raise error.Abort(_('cannot delete obsmarkers in the middle '
1684 raise error.Abort(_('cannot delete obsmarkers in the middle '
1686 'of transaction.'))
1685 'of transaction.'))
1687
1686
1688 with repo.lock():
1687 with repo.lock():
1689 n = repair.deleteobsmarkers(repo.obsstore, indices)
1688 n = repair.deleteobsmarkers(repo.obsstore, indices)
1690 ui.write(_('deleted %i obsolescence markers\n') % n)
1689 ui.write(_('deleted %i obsolescence markers\n') % n)
1691
1690
1692 return
1691 return
1693
1692
1694 if precursor is not None:
1693 if precursor is not None:
1695 if opts['rev']:
1694 if opts['rev']:
1696 raise error.Abort('cannot select revision when creating marker')
1695 raise error.Abort('cannot select revision when creating marker')
1697 metadata = {}
1696 metadata = {}
1698 metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
1697 metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
1699 succs = tuple(parsenodeid(succ) for succ in successors)
1698 succs = tuple(parsenodeid(succ) for succ in successors)
1700 l = repo.lock()
1699 l = repo.lock()
1701 try:
1700 try:
1702 tr = repo.transaction('debugobsolete')
1701 tr = repo.transaction('debugobsolete')
1703 try:
1702 try:
1704 date = opts.get('date')
1703 date = opts.get('date')
1705 if date:
1704 if date:
1706 date = dateutil.parsedate(date)
1705 date = dateutil.parsedate(date)
1707 else:
1706 else:
1708 date = None
1707 date = None
1709 prec = parsenodeid(precursor)
1708 prec = parsenodeid(precursor)
1710 parents = None
1709 parents = None
1711 if opts['record_parents']:
1710 if opts['record_parents']:
1712 if prec not in repo.unfiltered():
1711 if prec not in repo.unfiltered():
1713 raise error.Abort('cannot use --record-parents on '
1712 raise error.Abort('cannot use --record-parents on '
1714 'unknown changesets')
1713 'unknown changesets')
1715 parents = repo.unfiltered()[prec].parents()
1714 parents = repo.unfiltered()[prec].parents()
1716 parents = tuple(p.node() for p in parents)
1715 parents = tuple(p.node() for p in parents)
1717 repo.obsstore.create(tr, prec, succs, opts['flags'],
1716 repo.obsstore.create(tr, prec, succs, opts['flags'],
1718 parents=parents, date=date,
1717 parents=parents, date=date,
1719 metadata=metadata, ui=ui)
1718 metadata=metadata, ui=ui)
1720 tr.close()
1719 tr.close()
1721 except ValueError as exc:
1720 except ValueError as exc:
1722 raise error.Abort(_('bad obsmarker input: %s') %
1721 raise error.Abort(_('bad obsmarker input: %s') %
1723 pycompat.bytestr(exc))
1722 pycompat.bytestr(exc))
1724 finally:
1723 finally:
1725 tr.release()
1724 tr.release()
1726 finally:
1725 finally:
1727 l.release()
1726 l.release()
1728 else:
1727 else:
1729 if opts['rev']:
1728 if opts['rev']:
1730 revs = scmutil.revrange(repo, opts['rev'])
1729 revs = scmutil.revrange(repo, opts['rev'])
1731 nodes = [repo[r].node() for r in revs]
1730 nodes = [repo[r].node() for r in revs]
1732 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1731 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1733 exclusive=opts['exclusive']))
1732 exclusive=opts['exclusive']))
1734 markers.sort(key=lambda x: x._data)
1733 markers.sort(key=lambda x: x._data)
1735 else:
1734 else:
1736 markers = obsutil.getmarkers(repo)
1735 markers = obsutil.getmarkers(repo)
1737
1736
1738 markerstoiter = markers
1737 markerstoiter = markers
1739 isrelevant = lambda m: True
1738 isrelevant = lambda m: True
1740 if opts.get('rev') and opts.get('index'):
1739 if opts.get('rev') and opts.get('index'):
1741 markerstoiter = obsutil.getmarkers(repo)
1740 markerstoiter = obsutil.getmarkers(repo)
1742 markerset = set(markers)
1741 markerset = set(markers)
1743 isrelevant = lambda m: m in markerset
1742 isrelevant = lambda m: m in markerset
1744
1743
1745 fm = ui.formatter('debugobsolete', opts)
1744 fm = ui.formatter('debugobsolete', opts)
1746 for i, m in enumerate(markerstoiter):
1745 for i, m in enumerate(markerstoiter):
1747 if not isrelevant(m):
1746 if not isrelevant(m):
1748 # marker can be irrelevant when we're iterating over a set
1747 # marker can be irrelevant when we're iterating over a set
1749 # of markers (markerstoiter) which is bigger than the set
1748 # of markers (markerstoiter) which is bigger than the set
1750 # of markers we want to display (markers)
1749 # of markers we want to display (markers)
1751 # this can happen if both --index and --rev options are
1750 # this can happen if both --index and --rev options are
1752 # provided and thus we need to iterate over all of the markers
1751 # provided and thus we need to iterate over all of the markers
1753 # to get the correct indices, but only display the ones that
1752 # to get the correct indices, but only display the ones that
1754 # are relevant to --rev value
1753 # are relevant to --rev value
1755 continue
1754 continue
1756 fm.startitem()
1755 fm.startitem()
1757 ind = i if opts.get('index') else None
1756 ind = i if opts.get('index') else None
1758 cmdutil.showmarker(fm, m, index=ind)
1757 cmdutil.showmarker(fm, m, index=ind)
1759 fm.end()
1758 fm.end()
1760
1759
1761 @command('debugpathcomplete',
1760 @command('debugpathcomplete',
1762 [('f', 'full', None, _('complete an entire path')),
1761 [('f', 'full', None, _('complete an entire path')),
1763 ('n', 'normal', None, _('show only normal files')),
1762 ('n', 'normal', None, _('show only normal files')),
1764 ('a', 'added', None, _('show only added files')),
1763 ('a', 'added', None, _('show only added files')),
1765 ('r', 'removed', None, _('show only removed files'))],
1764 ('r', 'removed', None, _('show only removed files'))],
1766 _('FILESPEC...'))
1765 _('FILESPEC...'))
1767 def debugpathcomplete(ui, repo, *specs, **opts):
1766 def debugpathcomplete(ui, repo, *specs, **opts):
1768 '''complete part or all of a tracked path
1767 '''complete part or all of a tracked path
1769
1768
1770 This command supports shells that offer path name completion. It
1769 This command supports shells that offer path name completion. It
1771 currently completes only files already known to the dirstate.
1770 currently completes only files already known to the dirstate.
1772
1771
1773 Completion extends only to the next path segment unless
1772 Completion extends only to the next path segment unless
1774 --full is specified, in which case entire paths are used.'''
1773 --full is specified, in which case entire paths are used.'''
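# Illustrative note (hypothetical file names, not part of this change): with
# tracked files 'foo/bar.txt' and 'foo/baz/qux.txt', completing the spec 'fo'
# reports only the next segment, 'foo', while the same spec with --full
# reports both complete paths; directories are merged into the output so the
# shell can keep completing segment by segment.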
1775
1774
1776 def complete(path, acceptable):
1775 def complete(path, acceptable):
1777 dirstate = repo.dirstate
1776 dirstate = repo.dirstate
1778 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1777 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1779 rootdir = repo.root + pycompat.ossep
1778 rootdir = repo.root + pycompat.ossep
1780 if spec != repo.root and not spec.startswith(rootdir):
1779 if spec != repo.root and not spec.startswith(rootdir):
1781 return [], []
1780 return [], []
1782 if os.path.isdir(spec):
1781 if os.path.isdir(spec):
1783 spec += '/'
1782 spec += '/'
1784 spec = spec[len(rootdir):]
1783 spec = spec[len(rootdir):]
1785 fixpaths = pycompat.ossep != '/'
1784 fixpaths = pycompat.ossep != '/'
1786 if fixpaths:
1785 if fixpaths:
1787 spec = spec.replace(pycompat.ossep, '/')
1786 spec = spec.replace(pycompat.ossep, '/')
1788 speclen = len(spec)
1787 speclen = len(spec)
1789 fullpaths = opts[r'full']
1788 fullpaths = opts[r'full']
1790 files, dirs = set(), set()
1789 files, dirs = set(), set()
1791 adddir, addfile = dirs.add, files.add
1790 adddir, addfile = dirs.add, files.add
1792 for f, st in dirstate.iteritems():
1791 for f, st in dirstate.iteritems():
1793 if f.startswith(spec) and st[0] in acceptable:
1792 if f.startswith(spec) and st[0] in acceptable:
1794 if fixpaths:
1793 if fixpaths:
1795 f = f.replace('/', pycompat.ossep)
1794 f = f.replace('/', pycompat.ossep)
1796 if fullpaths:
1795 if fullpaths:
1797 addfile(f)
1796 addfile(f)
1798 continue
1797 continue
1799 s = f.find(pycompat.ossep, speclen)
1798 s = f.find(pycompat.ossep, speclen)
1800 if s >= 0:
1799 if s >= 0:
1801 adddir(f[:s])
1800 adddir(f[:s])
1802 else:
1801 else:
1803 addfile(f)
1802 addfile(f)
1804 return files, dirs
1803 return files, dirs
1805
1804
1806 acceptable = ''
1805 acceptable = ''
1807 if opts[r'normal']:
1806 if opts[r'normal']:
1808 acceptable += 'nm'
1807 acceptable += 'nm'
1809 if opts[r'added']:
1808 if opts[r'added']:
1810 acceptable += 'a'
1809 acceptable += 'a'
1811 if opts[r'removed']:
1810 if opts[r'removed']:
1812 acceptable += 'r'
1811 acceptable += 'r'
1813 cwd = repo.getcwd()
1812 cwd = repo.getcwd()
1814 if not specs:
1813 if not specs:
1815 specs = ['.']
1814 specs = ['.']
1816
1815
1817 files, dirs = set(), set()
1816 files, dirs = set(), set()
1818 for spec in specs:
1817 for spec in specs:
1819 f, d = complete(spec, acceptable or 'nmar')
1818 f, d = complete(spec, acceptable or 'nmar')
1820 files.update(f)
1819 files.update(f)
1821 dirs.update(d)
1820 dirs.update(d)
1822 files.update(dirs)
1821 files.update(dirs)
1823 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1822 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1824 ui.write('\n')
1823 ui.write('\n')
1825
1824
1826 @command('debugpeer', [], _('PATH'), norepo=True)
1825 @command('debugpeer', [], _('PATH'), norepo=True)
1827 def debugpeer(ui, path):
1826 def debugpeer(ui, path):
1828 """establish a connection to a peer repository"""
1827 """establish a connection to a peer repository"""
1829 # Always enable peer request logging. Requires --debug to display
1828 # Always enable peer request logging. Requires --debug to display
1830 # though.
1829 # though.
1831 overrides = {
1830 overrides = {
1832 ('devel', 'debug.peer-request'): True,
1831 ('devel', 'debug.peer-request'): True,
1833 }
1832 }
1834
1833
1835 with ui.configoverride(overrides):
1834 with ui.configoverride(overrides):
1836 peer = hg.peer(ui, {}, path)
1835 peer = hg.peer(ui, {}, path)
1837
1836
1838 local = peer.local() is not None
1837 local = peer.local() is not None
1839 canpush = peer.canpush()
1838 canpush = peer.canpush()
1840
1839
1841 ui.write(_('url: %s\n') % peer.url())
1840 ui.write(_('url: %s\n') % peer.url())
1842 ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
1841 ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
1843 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
1842 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
1844
1843
1845 @command('debugpickmergetool',
1844 @command('debugpickmergetool',
1846 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1845 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1847 ('', 'changedelete', None, _('emulate merging change and delete')),
1846 ('', 'changedelete', None, _('emulate merging change and delete')),
1848 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1847 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1849 _('[PATTERN]...'),
1848 _('[PATTERN]...'),
1850 inferrepo=True)
1849 inferrepo=True)
1851 def debugpickmergetool(ui, repo, *pats, **opts):
1850 def debugpickmergetool(ui, repo, *pats, **opts):
1852 """examine which merge tool is chosen for specified file
1851 """examine which merge tool is chosen for specified file
1853
1852
1854 As described in :hg:`help merge-tools`, Mercurial examines
1853 As described in :hg:`help merge-tools`, Mercurial examines
1855 the configurations below in this order to decide which merge tool is
1854 the configurations below in this order to decide which merge tool is
1856 chosen for the specified file.
1855 chosen for the specified file.
1857
1856
1858 1. ``--tool`` option
1857 1. ``--tool`` option
1859 2. ``HGMERGE`` environment variable
1858 2. ``HGMERGE`` environment variable
1860 3. configurations in ``merge-patterns`` section
1859 3. configurations in ``merge-patterns`` section
1861 4. configuration of ``ui.merge``
1860 4. configuration of ``ui.merge``
1862 5. configurations in ``merge-tools`` section
1861 5. configurations in ``merge-tools`` section
1863 6. ``hgmerge`` tool (for historical reasons only)
1862 6. ``hgmerge`` tool (for historical reasons only)
1864 7. default tool for fallback (``:merge`` or ``:prompt``)
1863 7. default tool for fallback (``:merge`` or ``:prompt``)
1865
1864
1866 This command writes out the examination result in the style below::
1865 This command writes out the examination result in the style below::
1867
1866
1868 FILE = MERGETOOL
1867 FILE = MERGETOOL
1869
1868
1870 By default, all files known in the first parent context of the
1869 By default, all files known in the first parent context of the
1871 working directory are examined. Use file patterns and/or -I/-X
1870 working directory are examined. Use file patterns and/or -I/-X
1872 options to limit target files. -r/--rev is also useful to examine
1871 options to limit target files. -r/--rev is also useful to examine
1873 files in another context without actually updating to it.
1872 files in another context without actually updating to it.
1874
1873
1875 With --debug, this command shows warning messages while matching
1874 With --debug, this command shows warning messages while matching
1876 against ``merge-patterns`` and so on, too. It is recommended to
1875 against ``merge-patterns`` and so on, too. It is recommended to
1877 use this option with explicit file patterns and/or -I/-X options,
1876 use this option with explicit file patterns and/or -I/-X options,
1878 because this option increases the amount of output per file according
1877 because this option increases the amount of output per file according
1879 to configurations in hgrc.
1878 to configurations in hgrc.
1880
1879
1881 With -v/--verbose, this command shows configurations below at
1880 With -v/--verbose, this command shows configurations below at
1882 first (only if specified).
1881 first (only if specified).
1883
1882
1884 - ``--tool`` option
1883 - ``--tool`` option
1885 - ``HGMERGE`` environment variable
1884 - ``HGMERGE`` environment variable
1886 - configuration of ``ui.merge``
1885 - configuration of ``ui.merge``
1887
1886
1888 If merge tool is chosen before matching against
1887 If merge tool is chosen before matching against
1889 ``merge-patterns``, this command can't show any helpful
1888 ``merge-patterns``, this command can't show any helpful
1890 information, even with --debug. In such a case, the information above is
1889 information, even with --debug. In such a case, the information above is
1891 useful to know why a merge tool is chosen.
1890 useful to know why a merge tool is chosen.
1892 """
1891 """
1893 opts = pycompat.byteskwargs(opts)
1892 opts = pycompat.byteskwargs(opts)
1894 overrides = {}
1893 overrides = {}
1895 if opts['tool']:
1894 if opts['tool']:
1896 overrides[('ui', 'forcemerge')] = opts['tool']
1895 overrides[('ui', 'forcemerge')] = opts['tool']
1897 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1896 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1898
1897
1899 with ui.configoverride(overrides, 'debugmergepatterns'):
1898 with ui.configoverride(overrides, 'debugmergepatterns'):
1900 hgmerge = encoding.environ.get("HGMERGE")
1899 hgmerge = encoding.environ.get("HGMERGE")
1901 if hgmerge is not None:
1900 if hgmerge is not None:
1902 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1901 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1903 uimerge = ui.config("ui", "merge")
1902 uimerge = ui.config("ui", "merge")
1904 if uimerge:
1903 if uimerge:
1905 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1904 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1906
1905
1907 ctx = scmutil.revsingle(repo, opts.get('rev'))
1906 ctx = scmutil.revsingle(repo, opts.get('rev'))
1908 m = scmutil.match(ctx, pats, opts)
1907 m = scmutil.match(ctx, pats, opts)
1909 changedelete = opts['changedelete']
1908 changedelete = opts['changedelete']
1910 for path in ctx.walk(m):
1909 for path in ctx.walk(m):
1911 fctx = ctx[path]
1910 fctx = ctx[path]
1912 try:
1911 try:
1913 if not ui.debugflag:
1912 if not ui.debugflag:
1914 ui.pushbuffer(error=True)
1913 ui.pushbuffer(error=True)
1915 tool, toolpath = filemerge._picktool(repo, ui, path,
1914 tool, toolpath = filemerge._picktool(repo, ui, path,
1916 fctx.isbinary(),
1915 fctx.isbinary(),
1917 'l' in fctx.flags(),
1916 'l' in fctx.flags(),
1918 changedelete)
1917 changedelete)
1919 finally:
1918 finally:
1920 if not ui.debugflag:
1919 if not ui.debugflag:
1921 ui.popbuffer()
1920 ui.popbuffer()
1922 ui.write(('%s = %s\n') % (path, tool))
1921 ui.write(('%s = %s\n') % (path, tool))
1923
1922
1924 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1923 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1925 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1924 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1926 '''access the pushkey key/value protocol
1925 '''access the pushkey key/value protocol
1927
1926
1928 With two args, list the keys in the given namespace.
1927 With two args, list the keys in the given namespace.
1929
1928
1930 With five args, set a key to new if it currently is set to old.
1929 With five args, set a key to new if it currently is set to old.
1931 Reports success or failure.
1930 Reports success or failure.
1932 '''
1931 '''
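# Illustrative example (hypothetical peer path, not part of this change):
# ``hg debugpushkey ../remote bookmarks`` lists remote bookmarks as
# KEY<TAB>VALUE lines via the listkeys() branch below, while passing KEY OLD
# NEW performs a conditional update and prints the boolean result (e.g.
# ``True``).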
1933
1932
1934 target = hg.peer(ui, {}, repopath)
1933 target = hg.peer(ui, {}, repopath)
1935 if keyinfo:
1934 if keyinfo:
1936 key, old, new = keyinfo
1935 key, old, new = keyinfo
1937 with target.commandexecutor() as e:
1936 with target.commandexecutor() as e:
1938 r = e.callcommand('pushkey', {
1937 r = e.callcommand('pushkey', {
1939 'namespace': namespace,
1938 'namespace': namespace,
1940 'key': key,
1939 'key': key,
1941 'old': old,
1940 'old': old,
1942 'new': new,
1941 'new': new,
1943 }).result()
1942 }).result()
1944
1943
1945 ui.status(pycompat.bytestr(r) + '\n')
1944 ui.status(pycompat.bytestr(r) + '\n')
1946 return not r
1945 return not r
1947 else:
1946 else:
1948 for k, v in sorted(target.listkeys(namespace).iteritems()):
1947 for k, v in sorted(target.listkeys(namespace).iteritems()):
1949 ui.write("%s\t%s\n" % (stringutil.escapestr(k),
1948 ui.write("%s\t%s\n" % (stringutil.escapestr(k),
1950 stringutil.escapestr(v)))
1949 stringutil.escapestr(v)))
1951
1950
1952 @command('debugpvec', [], _('A B'))
1951 @command('debugpvec', [], _('A B'))
1953 def debugpvec(ui, repo, a, b=None):
1952 def debugpvec(ui, repo, a, b=None):
1954 ca = scmutil.revsingle(repo, a)
1953 ca = scmutil.revsingle(repo, a)
1955 cb = scmutil.revsingle(repo, b)
1954 cb = scmutil.revsingle(repo, b)
1956 pa = pvec.ctxpvec(ca)
1955 pa = pvec.ctxpvec(ca)
1957 pb = pvec.ctxpvec(cb)
1956 pb = pvec.ctxpvec(cb)
1958 if pa == pb:
1957 if pa == pb:
1959 rel = "="
1958 rel = "="
1960 elif pa > pb:
1959 elif pa > pb:
1961 rel = ">"
1960 rel = ">"
1962 elif pa < pb:
1961 elif pa < pb:
1963 rel = "<"
1962 rel = "<"
1964 elif pa | pb:
1963 elif pa | pb:
1965 rel = "|"
1964 rel = "|"
1966 ui.write(_("a: %s\n") % pa)
1965 ui.write(_("a: %s\n") % pa)
1967 ui.write(_("b: %s\n") % pb)
1966 ui.write(_("b: %s\n") % pb)
1968 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1967 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1969 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1968 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1970 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1969 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1971 pa.distance(pb), rel))
1970 pa.distance(pb), rel))
1972
1971
1973 @command('debugrebuilddirstate|debugrebuildstate',
1972 @command('debugrebuilddirstate|debugrebuildstate',
1974 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1973 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1975 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1974 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1976 'the working copy parent')),
1975 'the working copy parent')),
1977 ],
1976 ],
1978 _('[-r REV]'))
1977 _('[-r REV]'))
1979 def debugrebuilddirstate(ui, repo, rev, **opts):
1978 def debugrebuilddirstate(ui, repo, rev, **opts):
1980 """rebuild the dirstate as it would look like for the given revision
1979 """rebuild the dirstate as it would look like for the given revision
1981
1980
1982 If no revision is specified, the first current parent will be used.
1981 If no revision is specified, the first current parent will be used.
1983
1982
1984 The dirstate will be set to the files of the given revision.
1983 The dirstate will be set to the files of the given revision.
1985 The actual working directory content or existing dirstate
1984 The actual working directory content or existing dirstate
1986 information such as adds or removes is not considered.
1985 information such as adds or removes is not considered.
1987
1986
1988 ``minimal`` will only rebuild the dirstate status for files that claim to be
1987 ``minimal`` will only rebuild the dirstate status for files that claim to be
1989 tracked but are not in the parent manifest, or that exist in the parent
1988 tracked but are not in the parent manifest, or that exist in the parent
1990 manifest but are not in the dirstate. It will not change adds, removes, or
1989 manifest but are not in the dirstate. It will not change adds, removes, or
1991 modified files that are in the working copy parent.
1990 modified files that are in the working copy parent.
1992
1991
1993 One use of this command is to make the next :hg:`status` invocation
1992 One use of this command is to make the next :hg:`status` invocation
1994 check the actual file content.
1993 check the actual file content.
1995 """
1994 """
1996 ctx = scmutil.revsingle(repo, rev)
1995 ctx = scmutil.revsingle(repo, rev)
1997 with repo.wlock():
1996 with repo.wlock():
1998 dirstate = repo.dirstate
1997 dirstate = repo.dirstate
1999 changedfiles = None
1998 changedfiles = None
2000 # See command doc for what minimal does.
1999 # See command doc for what minimal does.
2001 if opts.get(r'minimal'):
2000 if opts.get(r'minimal'):
2002 manifestfiles = set(ctx.manifest().keys())
2001 manifestfiles = set(ctx.manifest().keys())
2003 dirstatefiles = set(dirstate)
2002 dirstatefiles = set(dirstate)
2004 manifestonly = manifestfiles - dirstatefiles
2003 manifestonly = manifestfiles - dirstatefiles
2005 dsonly = dirstatefiles - manifestfiles
2004 dsonly = dirstatefiles - manifestfiles
2006 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
2005 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
2007 changedfiles = manifestonly | dsnotadded
2006 changedfiles = manifestonly | dsnotadded
2008
2007
2009 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2008 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2010
2009
2011 @command('debugrebuildfncache', [], '')
2010 @command('debugrebuildfncache', [], '')
2012 def debugrebuildfncache(ui, repo):
2011 def debugrebuildfncache(ui, repo):
2013 """rebuild the fncache file"""
2012 """rebuild the fncache file"""
2014 repair.rebuildfncache(ui, repo)
2013 repair.rebuildfncache(ui, repo)
2015
2014
2016 @command('debugrename',
2015 @command('debugrename',
2017 [('r', 'rev', '', _('revision to debug'), _('REV'))],
2016 [('r', 'rev', '', _('revision to debug'), _('REV'))],
2018 _('[-r REV] FILE'))
2017 _('[-r REV] FILE'))
2019 def debugrename(ui, repo, file1, *pats, **opts):
2018 def debugrename(ui, repo, file1, *pats, **opts):
2020 """dump rename information"""
2019 """dump rename information"""
2021
2020
2022 opts = pycompat.byteskwargs(opts)
2021 opts = pycompat.byteskwargs(opts)
2023 ctx = scmutil.revsingle(repo, opts.get('rev'))
2022 ctx = scmutil.revsingle(repo, opts.get('rev'))
2024 m = scmutil.match(ctx, (file1,) + pats, opts)
2023 m = scmutil.match(ctx, (file1,) + pats, opts)
2025 for abs in ctx.walk(m):
2024 for abs in ctx.walk(m):
2026 fctx = ctx[abs]
2025 fctx = ctx[abs]
2027 o = fctx.filelog().renamed(fctx.filenode())
2026 o = fctx.filelog().renamed(fctx.filenode())
2028 rel = m.rel(abs)
2027 rel = m.rel(abs)
2029 if o:
2028 if o:
2030 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2029 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2031 else:
2030 else:
2032 ui.write(_("%s not renamed\n") % rel)
2031 ui.write(_("%s not renamed\n") % rel)
2033
2032
2034 @command('debugrevlog', cmdutil.debugrevlogopts +
2033 @command('debugrevlog', cmdutil.debugrevlogopts +
2035 [('d', 'dump', False, _('dump index data'))],
2034 [('d', 'dump', False, _('dump index data'))],
2036 _('-c|-m|FILE'),
2035 _('-c|-m|FILE'),
2037 optionalrepo=True)
2036 optionalrepo=True)
2038 def debugrevlog(ui, repo, file_=None, **opts):
2037 def debugrevlog(ui, repo, file_=None, **opts):
2039 """show data and statistics about a revlog"""
2038 """show data and statistics about a revlog"""
2040 opts = pycompat.byteskwargs(opts)
2039 opts = pycompat.byteskwargs(opts)
2041 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
2040 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
2042
2041
2043 if opts.get("dump"):
2042 if opts.get("dump"):
2044 numrevs = len(r)
2043 numrevs = len(r)
2045 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
2044 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
2046 " rawsize totalsize compression heads chainlen\n"))
2045 " rawsize totalsize compression heads chainlen\n"))
2047 ts = 0
2046 ts = 0
2048 heads = set()
2047 heads = set()
2049
2048
2050 for rev in pycompat.xrange(numrevs):
2049 for rev in pycompat.xrange(numrevs):
2051 dbase = r.deltaparent(rev)
2050 dbase = r.deltaparent(rev)
2052 if dbase == -1:
2051 if dbase == -1:
2053 dbase = rev
2052 dbase = rev
2054 cbase = r.chainbase(rev)
2053 cbase = r.chainbase(rev)
2055 clen = r.chainlen(rev)
2054 clen = r.chainlen(rev)
2056 p1, p2 = r.parentrevs(rev)
2055 p1, p2 = r.parentrevs(rev)
2057 rs = r.rawsize(rev)
2056 rs = r.rawsize(rev)
2058 ts = ts + rs
2057 ts = ts + rs
2059 heads -= set(r.parentrevs(rev))
2058 heads -= set(r.parentrevs(rev))
2060 heads.add(rev)
2059 heads.add(rev)
2061 try:
2060 try:
2062 compression = ts / r.end(rev)
2061 compression = ts / r.end(rev)
2063 except ZeroDivisionError:
2062 except ZeroDivisionError:
2064 compression = 0
2063 compression = 0
2065 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2064 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2066 "%11d %5d %8d\n" %
2065 "%11d %5d %8d\n" %
2067 (rev, p1, p2, r.start(rev), r.end(rev),
2066 (rev, p1, p2, r.start(rev), r.end(rev),
2068 r.start(dbase), r.start(cbase),
2067 r.start(dbase), r.start(cbase),
2069 r.start(p1), r.start(p2),
2068 r.start(p1), r.start(p2),
2070 rs, ts, compression, len(heads), clen))
2069 rs, ts, compression, len(heads), clen))
2071 return 0
2070 return 0
2072
2071
2073 v = r.version
2072 v = r.version
2074 format = v & 0xFFFF
2073 format = v & 0xFFFF
2075 flags = []
2074 flags = []
2076 gdelta = False
2075 gdelta = False
2077 if v & revlog.FLAG_INLINE_DATA:
2076 if v & revlog.FLAG_INLINE_DATA:
2078 flags.append('inline')
2077 flags.append('inline')
2079 if v & revlog.FLAG_GENERALDELTA:
2078 if v & revlog.FLAG_GENERALDELTA:
2080 gdelta = True
2079 gdelta = True
2081 flags.append('generaldelta')
2080 flags.append('generaldelta')
2082 if not flags:
2081 if not flags:
2083 flags = ['(none)']
2082 flags = ['(none)']
2084
2083
2085 nummerges = 0
2084 nummerges = 0
2086 numfull = 0
2085 numfull = 0
2087 numprev = 0
2086 numprev = 0
2088 nump1 = 0
2087 nump1 = 0
2089 nump2 = 0
2088 nump2 = 0
2090 numother = 0
2089 numother = 0
2091 nump1prev = 0
2090 nump1prev = 0
2092 nump2prev = 0
2091 nump2prev = 0
2093 chainlengths = []
2092 chainlengths = []
2094 chainbases = []
2093 chainbases = []
2095 chainspans = []
2094 chainspans = []
2096
2095
2097 datasize = [None, 0, 0]
2096 datasize = [None, 0, 0]
2098 fullsize = [None, 0, 0]
2097 fullsize = [None, 0, 0]
2099 deltasize = [None, 0, 0]
2098 deltasize = [None, 0, 0]
2100 chunktypecounts = {}
2099 chunktypecounts = {}
2101 chunktypesizes = {}
2100 chunktypesizes = {}
2102
2101
2103 def addsize(size, l):
2102 def addsize(size, l):
2104 if l[0] is None or size < l[0]:
2103 if l[0] is None or size < l[0]:
2105 l[0] = size
2104 l[0] = size
2106 if size > l[1]:
2105 if size > l[1]:
2107 l[1] = size
2106 l[1] = size
2108 l[2] += size
2107 l[2] += size
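# Explanatory note: addsize() keeps a running (min, max, total) triple; for
# example, addsize(3, l) followed by addsize(7, l) on l = [None, 0, 0] leaves
# l == [3, 7, 10]. The totals are divided by the relevant revision counts
# further down to report averages.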
2109
2108
2110 numrevs = len(r)
2109 numrevs = len(r)
2111 for rev in pycompat.xrange(numrevs):
2110 for rev in pycompat.xrange(numrevs):
2112 p1, p2 = r.parentrevs(rev)
2111 p1, p2 = r.parentrevs(rev)
2113 delta = r.deltaparent(rev)
2112 delta = r.deltaparent(rev)
2114 if format > 0:
2113 if format > 0:
2115 addsize(r.rawsize(rev), datasize)
2114 addsize(r.rawsize(rev), datasize)
2116 if p2 != nullrev:
2115 if p2 != nullrev:
2117 nummerges += 1
2116 nummerges += 1
2118 size = r.length(rev)
2117 size = r.length(rev)
2119 if delta == nullrev:
2118 if delta == nullrev:
2120 chainlengths.append(0)
2119 chainlengths.append(0)
2121 chainbases.append(r.start(rev))
2120 chainbases.append(r.start(rev))
2122 chainspans.append(size)
2121 chainspans.append(size)
2123 numfull += 1
2122 numfull += 1
2124 addsize(size, fullsize)
2123 addsize(size, fullsize)
2125 else:
2124 else:
2126 chainlengths.append(chainlengths[delta] + 1)
2125 chainlengths.append(chainlengths[delta] + 1)
2127 baseaddr = chainbases[delta]
2126 baseaddr = chainbases[delta]
2128 revaddr = r.start(rev)
2127 revaddr = r.start(rev)
2129 chainbases.append(baseaddr)
2128 chainbases.append(baseaddr)
2130 chainspans.append((revaddr - baseaddr) + size)
2129 chainspans.append((revaddr - baseaddr) + size)
2131 addsize(size, deltasize)
2130 addsize(size, deltasize)
2132 if delta == rev - 1:
2131 if delta == rev - 1:
2133 numprev += 1
2132 numprev += 1
2134 if delta == p1:
2133 if delta == p1:
2135 nump1prev += 1
2134 nump1prev += 1
2136 elif delta == p2:
2135 elif delta == p2:
2137 nump2prev += 1
2136 nump2prev += 1
2138 elif delta == p1:
2137 elif delta == p1:
2139 nump1 += 1
2138 nump1 += 1
2140 elif delta == p2:
2139 elif delta == p2:
2141 nump2 += 1
2140 nump2 += 1
2142 elif delta != nullrev:
2141 elif delta != nullrev:
2143 numother += 1
2142 numother += 1
2144
2143
2145 # Obtain data on the raw chunks in the revlog.
2144 # Obtain data on the raw chunks in the revlog.
2146 segment = r._getsegmentforrevs(rev, rev)[1]
2145 segment = r._getsegmentforrevs(rev, rev)[1]
2147 if segment:
2146 if segment:
2148 chunktype = bytes(segment[0:1])
2147 chunktype = bytes(segment[0:1])
2149 else:
2148 else:
2150 chunktype = 'empty'
2149 chunktype = 'empty'
2151
2150
2152 if chunktype not in chunktypecounts:
2151 if chunktype not in chunktypecounts:
2153 chunktypecounts[chunktype] = 0
2152 chunktypecounts[chunktype] = 0
2154 chunktypesizes[chunktype] = 0
2153 chunktypesizes[chunktype] = 0
2155
2154
2156 chunktypecounts[chunktype] += 1
2155 chunktypecounts[chunktype] += 1
2157 chunktypesizes[chunktype] += size
2156 chunktypesizes[chunktype] += size
2158
2157
2159 # Adjust size min value for empty cases
2158 # Adjust size min value for empty cases
2160 for size in (datasize, fullsize, deltasize):
2159 for size in (datasize, fullsize, deltasize):
2161 if size[0] is None:
2160 if size[0] is None:
2162 size[0] = 0
2161 size[0] = 0
2163
2162
2164 numdeltas = numrevs - numfull
2163 numdeltas = numrevs - numfull
2165 numoprev = numprev - nump1prev - nump2prev
2164 numoprev = numprev - nump1prev - nump2prev
2166 totalrawsize = datasize[2]
2165 totalrawsize = datasize[2]
2167 datasize[2] /= numrevs
2166 datasize[2] /= numrevs
2168 fulltotal = fullsize[2]
2167 fulltotal = fullsize[2]
2169 fullsize[2] /= numfull
2168 fullsize[2] /= numfull
2170 deltatotal = deltasize[2]
2169 deltatotal = deltasize[2]
2171 if numrevs - numfull > 0:
2170 if numrevs - numfull > 0:
2172 deltasize[2] /= numrevs - numfull
2171 deltasize[2] /= numrevs - numfull
2173 totalsize = fulltotal + deltatotal
2172 totalsize = fulltotal + deltatotal
2174 avgchainlen = sum(chainlengths) / numrevs
2173 avgchainlen = sum(chainlengths) / numrevs
2175 maxchainlen = max(chainlengths)
2174 maxchainlen = max(chainlengths)
2176 maxchainspan = max(chainspans)
2175 maxchainspan = max(chainspans)
2177 compratio = 1
2176 compratio = 1
2178 if totalsize:
2177 if totalsize:
2179 compratio = totalrawsize / totalsize
2178 compratio = totalrawsize / totalsize
2180
2179
2181 basedfmtstr = '%%%dd\n'
2180 basedfmtstr = '%%%dd\n'
2182 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2181 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2183
2182
2184 def dfmtstr(max):
2183 def dfmtstr(max):
2185 return basedfmtstr % len(str(max))
2184 return basedfmtstr % len(str(max))
2186 def pcfmtstr(max, padding=0):
2185 def pcfmtstr(max, padding=0):
2187 return basepcfmtstr % (len(str(max)), ' ' * padding)
2186 return basepcfmtstr % (len(str(max)), ' ' * padding)
2188
2187
2189 def pcfmt(value, total):
2188 def pcfmt(value, total):
2190 if total:
2189 if total:
2191 return (value, 100 * float(value) / total)
2190 return (value, 100 * float(value) / total)
2192 else:
2191 else:
2193 return value, 100.0
2192 return value, 100.0
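# Explanatory note: pcfmt() pairs a value with its percentage of the total,
# e.g. pcfmt(25, 200) == (25, 12.5), and pcfmtstr() builds the matching
# '%d (%5.2f%%)'-style format sized to its argument, so the pair renders
# roughly as '25 (12.50%)'.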
2194
2193
2195 ui.write(('format : %d\n') % format)
2194 ui.write(('format : %d\n') % format)
2196 ui.write(('flags : %s\n') % ', '.join(flags))
2195 ui.write(('flags : %s\n') % ', '.join(flags))
2197
2196
2198 ui.write('\n')
2197 ui.write('\n')
2199 fmt = pcfmtstr(totalsize)
2198 fmt = pcfmtstr(totalsize)
2200 fmt2 = dfmtstr(totalsize)
2199 fmt2 = dfmtstr(totalsize)
2201 ui.write(('revisions : ') + fmt2 % numrevs)
2200 ui.write(('revisions : ') + fmt2 % numrevs)
2202 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2201 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2203 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2202 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2204 ui.write(('revisions : ') + fmt2 % numrevs)
2203 ui.write(('revisions : ') + fmt2 % numrevs)
2205 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
2204 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
2206 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2205 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2207 ui.write(('revision size : ') + fmt2 % totalsize)
2206 ui.write(('revision size : ') + fmt2 % totalsize)
2208 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
2207 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
2209 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2208 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2210
2209
2211 def fmtchunktype(chunktype):
2210 def fmtchunktype(chunktype):
2212 if chunktype == 'empty':
2211 if chunktype == 'empty':
2213 return ' %s : ' % chunktype
2212 return ' %s : ' % chunktype
2214 elif chunktype in pycompat.bytestr(string.ascii_letters):
2213 elif chunktype in pycompat.bytestr(string.ascii_letters):
2215 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2214 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2216 else:
2215 else:
2217 return ' 0x%s : ' % hex(chunktype)
2216 return ' 0x%s : ' % hex(chunktype)
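# Explanatory note: revlog chunks are keyed by their first byte, so an
# uncompressed chunk ('u') is typically reported as ``0x75 (u)``, a
# zlib-compressed chunk ('x') as ``0x78 (x)``, and a non-printable marker
# such as '\0' simply as ``0x00``.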
2218
2217
2219 ui.write('\n')
2218 ui.write('\n')
2220 ui.write(('chunks : ') + fmt2 % numrevs)
2219 ui.write(('chunks : ') + fmt2 % numrevs)
2221 for chunktype in sorted(chunktypecounts):
2220 for chunktype in sorted(chunktypecounts):
2222 ui.write(fmtchunktype(chunktype))
2221 ui.write(fmtchunktype(chunktype))
2223 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2222 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2224 ui.write(('chunks size : ') + fmt2 % totalsize)
2223 ui.write(('chunks size : ') + fmt2 % totalsize)
2225 for chunktype in sorted(chunktypecounts):
2224 for chunktype in sorted(chunktypecounts):
2226 ui.write(fmtchunktype(chunktype))
2225 ui.write(fmtchunktype(chunktype))
2227 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2226 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2228
2227
2229 ui.write('\n')
2228 ui.write('\n')
2230 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2229 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2231 ui.write(('avg chain length : ') + fmt % avgchainlen)
2230 ui.write(('avg chain length : ') + fmt % avgchainlen)
2232 ui.write(('max chain length : ') + fmt % maxchainlen)
2231 ui.write(('max chain length : ') + fmt % maxchainlen)
2233 ui.write(('max chain reach : ') + fmt % maxchainspan)
2232 ui.write(('max chain reach : ') + fmt % maxchainspan)
2234 ui.write(('compression ratio : ') + fmt % compratio)
2233 ui.write(('compression ratio : ') + fmt % compratio)
2235
2234
2236 if format > 0:
2235 if format > 0:
2237 ui.write('\n')
2236 ui.write('\n')
2238 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2237 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2239 % tuple(datasize))
2238 % tuple(datasize))
2240 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2239 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2241 % tuple(fullsize))
2240 % tuple(fullsize))
2242 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2241 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2243 % tuple(deltasize))
2242 % tuple(deltasize))
2244
2243
2245 if numdeltas > 0:
2244 if numdeltas > 0:
2246 ui.write('\n')
2245 ui.write('\n')
2247 fmt = pcfmtstr(numdeltas)
2246 fmt = pcfmtstr(numdeltas)
2248 fmt2 = pcfmtstr(numdeltas, 4)
2247 fmt2 = pcfmtstr(numdeltas, 4)
2249 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2248 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2250 if numprev > 0:
2249 if numprev > 0:
2251 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2250 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2252 numprev))
2251 numprev))
2253 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2252 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2254 numprev))
2253 numprev))
2255 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2254 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2256 numprev))
2255 numprev))
2257 if gdelta:
2256 if gdelta:
2258 ui.write(('deltas against p1 : ')
2257 ui.write(('deltas against p1 : ')
2259 + fmt % pcfmt(nump1, numdeltas))
2258 + fmt % pcfmt(nump1, numdeltas))
2260 ui.write(('deltas against p2 : ')
2259 ui.write(('deltas against p2 : ')
2261 + fmt % pcfmt(nump2, numdeltas))
2260 + fmt % pcfmt(nump2, numdeltas))
2262 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2261 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2263 numdeltas))
2262 numdeltas))
2264
2263
2265 @command('debugrevspec',
2264 @command('debugrevspec',
2266 [('', 'optimize', None,
2265 [('', 'optimize', None,
2267 _('print parsed tree after optimizing (DEPRECATED)')),
2266 _('print parsed tree after optimizing (DEPRECATED)')),
2268 ('', 'show-revs', True, _('print list of result revisions (default)')),
2267 ('', 'show-revs', True, _('print list of result revisions (default)')),
2269 ('s', 'show-set', None, _('print internal representation of result set')),
2268 ('s', 'show-set', None, _('print internal representation of result set')),
2270 ('p', 'show-stage', [],
2269 ('p', 'show-stage', [],
2271 _('print parsed tree at the given stage'), _('NAME')),
2270 _('print parsed tree at the given stage'), _('NAME')),
2272 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2271 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2273 ('', 'verify-optimized', False, _('verify optimized result')),
2272 ('', 'verify-optimized', False, _('verify optimized result')),
2274 ],
2273 ],
2275 ('REVSPEC'))
2274 ('REVSPEC'))
2276 def debugrevspec(ui, repo, expr, **opts):
2275 def debugrevspec(ui, repo, expr, **opts):
2277 """parse and apply a revision specification
2276 """parse and apply a revision specification
2278
2277
2279 Use -p/--show-stage option to print the parsed tree at the given stages.
2278 Use -p/--show-stage option to print the parsed tree at the given stages.
2280 Use -p all to print tree at every stage.
2279 Use -p all to print tree at every stage.
2281
2280
2282 Use --no-show-revs option with -s or -p to print only the set
2281 Use --no-show-revs option with -s or -p to print only the set
2283 representation or the parsed tree respectively.
2282 representation or the parsed tree respectively.
2284
2283
2285 Use --verify-optimized to compare the optimized result with the unoptimized
2284 Use --verify-optimized to compare the optimized result with the unoptimized
2286 one. Returns 1 if the optimized result differs.
2285 one. Returns 1 if the optimized result differs.
2287 """
2286 """
2288 opts = pycompat.byteskwargs(opts)
2287 opts = pycompat.byteskwargs(opts)
2289 aliases = ui.configitems('revsetalias')
2288 aliases = ui.configitems('revsetalias')
2290 stages = [
2289 stages = [
2291 ('parsed', lambda tree: tree),
2290 ('parsed', lambda tree: tree),
2292 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2291 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2293 ui.warn)),
2292 ui.warn)),
2294 ('concatenated', revsetlang.foldconcat),
2293 ('concatenated', revsetlang.foldconcat),
2295 ('analyzed', revsetlang.analyze),
2294 ('analyzed', revsetlang.analyze),
2296 ('optimized', revsetlang.optimize),
2295 ('optimized', revsetlang.optimize),
2297 ]
2296 ]
2298 if opts['no_optimized']:
2297 if opts['no_optimized']:
2299 stages = stages[:-1]
2298 stages = stages[:-1]
2300 if opts['verify_optimized'] and opts['no_optimized']:
2299 if opts['verify_optimized'] and opts['no_optimized']:
2301 raise error.Abort(_('cannot use --verify-optimized with '
2300 raise error.Abort(_('cannot use --verify-optimized with '
2302 '--no-optimized'))
2301 '--no-optimized'))
2303 stagenames = set(n for n, f in stages)
2302 stagenames = set(n for n, f in stages)
2304
2303
2305 showalways = set()
2304 showalways = set()
2306 showchanged = set()
2305 showchanged = set()
2307 if ui.verbose and not opts['show_stage']:
2306 if ui.verbose and not opts['show_stage']:
2308 # show parsed tree by --verbose (deprecated)
2307 # show parsed tree by --verbose (deprecated)
2309 showalways.add('parsed')
2308 showalways.add('parsed')
2310 showchanged.update(['expanded', 'concatenated'])
2309 showchanged.update(['expanded', 'concatenated'])
2311 if opts['optimize']:
2310 if opts['optimize']:
2312 showalways.add('optimized')
2311 showalways.add('optimized')
2313 if opts['show_stage'] and opts['optimize']:
2312 if opts['show_stage'] and opts['optimize']:
2314 raise error.Abort(_('cannot use --optimize with --show-stage'))
2313 raise error.Abort(_('cannot use --optimize with --show-stage'))
2315 if opts['show_stage'] == ['all']:
2314 if opts['show_stage'] == ['all']:
2316 showalways.update(stagenames)
2315 showalways.update(stagenames)
2317 else:
2316 else:
2318 for n in opts['show_stage']:
2317 for n in opts['show_stage']:
2319 if n not in stagenames:
2318 if n not in stagenames:
2320 raise error.Abort(_('invalid stage name: %s') % n)
2319 raise error.Abort(_('invalid stage name: %s') % n)
2321 showalways.update(opts['show_stage'])
2320 showalways.update(opts['show_stage'])
2322
2321
2323 treebystage = {}
2322 treebystage = {}
2324 printedtree = None
2323 printedtree = None
2325 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
2324 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
2326 for n, f in stages:
2325 for n, f in stages:
2327 treebystage[n] = tree = f(tree)
2326 treebystage[n] = tree = f(tree)
2328 if n in showalways or (n in showchanged and tree != printedtree):
2327 if n in showalways or (n in showchanged and tree != printedtree):
2329 if opts['show_stage'] or n != 'parsed':
2328 if opts['show_stage'] or n != 'parsed':
2330 ui.write(("* %s:\n") % n)
2329 ui.write(("* %s:\n") % n)
2331 ui.write(revsetlang.prettyformat(tree), "\n")
2330 ui.write(revsetlang.prettyformat(tree), "\n")
2332 printedtree = tree
2331 printedtree = tree
2333
2332
2334 if opts['verify_optimized']:
2333 if opts['verify_optimized']:
2335 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2334 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2336 brevs = revset.makematcher(treebystage['optimized'])(repo)
2335 brevs = revset.makematcher(treebystage['optimized'])(repo)
2337 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2336 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2338 ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
2337 ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
2339 ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
2338 ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
2340 arevs = list(arevs)
2339 arevs = list(arevs)
2341 brevs = list(brevs)
2340 brevs = list(brevs)
2342 if arevs == brevs:
2341 if arevs == brevs:
2343 return 0
2342 return 0
2344 ui.write(('--- analyzed\n'), label='diff.file_a')
2343 ui.write(('--- analyzed\n'), label='diff.file_a')
2345 ui.write(('+++ optimized\n'), label='diff.file_b')
2344 ui.write(('+++ optimized\n'), label='diff.file_b')
2346 sm = difflib.SequenceMatcher(None, arevs, brevs)
2345 sm = difflib.SequenceMatcher(None, arevs, brevs)
2347 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2346 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2348 if tag in ('delete', 'replace'):
2347 if tag in ('delete', 'replace'):
2349 for c in arevs[alo:ahi]:
2348 for c in arevs[alo:ahi]:
2350 ui.write('-%s\n' % c, label='diff.deleted')
2349 ui.write('-%s\n' % c, label='diff.deleted')
2351 if tag in ('insert', 'replace'):
2350 if tag in ('insert', 'replace'):
2352 for c in brevs[blo:bhi]:
2351 for c in brevs[blo:bhi]:
2353 ui.write('+%s\n' % c, label='diff.inserted')
2352 ui.write('+%s\n' % c, label='diff.inserted')
2354 if tag == 'equal':
2353 if tag == 'equal':
2355 for c in arevs[alo:ahi]:
2354 for c in arevs[alo:ahi]:
2356 ui.write(' %s\n' % c)
2355 ui.write(' %s\n' % c)
2357 return 1
2356 return 1
2358
2357
2359 func = revset.makematcher(tree)
2358 func = revset.makematcher(tree)
2360 revs = func(repo)
2359 revs = func(repo)
2361 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2360 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2362 ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
2361 ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
2363 if not opts['show_revs']:
2362 if not opts['show_revs']:
2364 return
2363 return
2365 for c in revs:
2364 for c in revs:
2366 ui.write("%d\n" % c)
2365 ui.write("%d\n" % c)
2367
2366
2368 @command('debugserve', [
2367 @command('debugserve', [
2369 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2368 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2370 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2369 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2371 ('', 'logiofile', '', _('file to log server I/O to')),
2370 ('', 'logiofile', '', _('file to log server I/O to')),
2372 ], '')
2371 ], '')
2373 def debugserve(ui, repo, **opts):
2372 def debugserve(ui, repo, **opts):
2374 """run a server with advanced settings
2373 """run a server with advanced settings
2375
2374
2376 This command is similar to :hg:`serve`. It exists partially as a
2375 This command is similar to :hg:`serve`. It exists partially as a
2377 workaround to the fact that ``hg serve --stdio`` must have specific
2376 workaround to the fact that ``hg serve --stdio`` must have specific
2378 arguments for security reasons.
2377 arguments for security reasons.
2379 """
2378 """
2380 opts = pycompat.byteskwargs(opts)
2379 opts = pycompat.byteskwargs(opts)
2381
2380
2382 if not opts['sshstdio']:
2381 if not opts['sshstdio']:
2383 raise error.Abort(_('only --sshstdio is currently supported'))
2382 raise error.Abort(_('only --sshstdio is currently supported'))
2384
2383
2385 logfh = None
2384 logfh = None
2386
2385
2387 if opts['logiofd'] and opts['logiofile']:
2386 if opts['logiofd'] and opts['logiofile']:
2388 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2387 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2389
2388
2390 if opts['logiofd']:
2389 if opts['logiofd']:
2391 # Line buffered because output is line based.
2390 # Line buffered because output is line based.
2392 try:
2391 try:
2393 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2392 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2394 except OSError as e:
2393 except OSError as e:
2395 if e.errno != errno.ESPIPE:
2394 if e.errno != errno.ESPIPE:
2396 raise
2395 raise
2397 # can't seek a pipe, so `ab` mode fails on py3
2396 # can't seek a pipe, so `ab` mode fails on py3
2398 logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
2397 logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
2399 elif opts['logiofile']:
2398 elif opts['logiofile']:
2400 logfh = open(opts['logiofile'], 'ab', 1)
2399 logfh = open(opts['logiofile'], 'ab', 1)
2401
2400
2402 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2401 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2403 s.serve_forever()
2402 s.serve_forever()
2404
2403
2405 @command('debugsetparents', [], _('REV1 [REV2]'))
2404 @command('debugsetparents', [], _('REV1 [REV2]'))
2406 def debugsetparents(ui, repo, rev1, rev2=None):
2405 def debugsetparents(ui, repo, rev1, rev2=None):
2407 """manually set the parents of the current working directory
2406 """manually set the parents of the current working directory
2408
2407
2409 This is useful for writing repository conversion tools, but should
2408 This is useful for writing repository conversion tools, but should
2410 be used with care. For example, neither the working directory nor the
2409 be used with care. For example, neither the working directory nor the
2411 dirstate is updated, so file status may be incorrect after running this
2410 dirstate is updated, so file status may be incorrect after running this
2412 command.
2411 command.
2413
2412
2414 Returns 0 on success.
2413 Returns 0 on success.
2415 """
2414 """
2416
2415
2417 node1 = scmutil.revsingle(repo, rev1).node()
2416 node1 = scmutil.revsingle(repo, rev1).node()
2418 node2 = scmutil.revsingle(repo, rev2, 'null').node()
2417 node2 = scmutil.revsingle(repo, rev2, 'null').node()
2419
2418
2420 with repo.wlock():
2419 with repo.wlock():
2421 repo.setparents(node1, node2)
2420 repo.setparents(node1, node2)
2422
2421
2423 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2422 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2424 def debugssl(ui, repo, source=None, **opts):
2423 def debugssl(ui, repo, source=None, **opts):
2425 '''test a secure connection to a server
2424 '''test a secure connection to a server
2426
2425
2427 This builds the certificate chain for the server on Windows, installing the
2426 This builds the certificate chain for the server on Windows, installing the
2428 missing intermediates and trusted root via Windows Update if necessary. It
2427 missing intermediates and trusted root via Windows Update if necessary. It
2429 does nothing on other platforms.
2428 does nothing on other platforms.
2430
2429
2431 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2430 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2432 that server is used. See :hg:`help urls` for more information.
2431 that server is used. See :hg:`help urls` for more information.
2433
2432
2434 If the update succeeds, retry the original operation. Otherwise, the cause
2433 If the update succeeds, retry the original operation. Otherwise, the cause
2435 of the SSL error is likely another issue.
2434 of the SSL error is likely another issue.
2436 '''
2435 '''
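# Illustrative example (hypothetical URL, not part of this change): on
# Windows, ``hg debugssl https://hg.example.com/repo`` connects to port 443,
# fetches the peer certificate, and reports whether the certificate chain is
# complete, asking Windows Update to fetch missing intermediates when it is
# not.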
2437 if not pycompat.iswindows:
2436 if not pycompat.iswindows:
2438 raise error.Abort(_('certificate chain building is only possible on '
2437 raise error.Abort(_('certificate chain building is only possible on '
2439 'Windows'))
2438 'Windows'))
2440
2439
2441 if not source:
2440 if not source:
2442 if not repo:
2441 if not repo:
2443 raise error.Abort(_("there is no Mercurial repository here, and no "
2442 raise error.Abort(_("there is no Mercurial repository here, and no "
2444 "server specified"))
2443 "server specified"))
2445 source = "default"
2444 source = "default"
2446
2445
2447 source, branches = hg.parseurl(ui.expandpath(source))
2446 source, branches = hg.parseurl(ui.expandpath(source))
2448 url = util.url(source)
2447 url = util.url(source)
2449 addr = None
2448 addr = None
2450
2449
2451 defaultport = {'https': 443, 'ssh': 22}
2450 defaultport = {'https': 443, 'ssh': 22}
2452 if url.scheme in defaultport:
2451 if url.scheme in defaultport:
2453 try:
2452 try:
2454 addr = (url.host, int(url.port or defaultport[url.scheme]))
2453 addr = (url.host, int(url.port or defaultport[url.scheme]))
2455 except ValueError:
2454 except ValueError:
2456 raise error.Abort(_("malformed port number in URL"))
2455 raise error.Abort(_("malformed port number in URL"))
2457 else:
2456 else:
2458 raise error.Abort(_("only https and ssh connections are supported"))
2457 raise error.Abort(_("only https and ssh connections are supported"))
2459
2458
2460 from . import win32
2459 from . import win32
2461
2460
2462 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2461 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2463 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2462 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2464
2463
2465 try:
2464 try:
2466 s.connect(addr)
2465 s.connect(addr)
2467 cert = s.getpeercert(True)
2466 cert = s.getpeercert(True)
2468
2467
2469 ui.status(_('checking the certificate chain for %s\n') % url.host)
2468 ui.status(_('checking the certificate chain for %s\n') % url.host)
2470
2469
2471 complete = win32.checkcertificatechain(cert, build=False)
2470 complete = win32.checkcertificatechain(cert, build=False)
2472
2471
2473 if not complete:
2472 if not complete:
2474 ui.status(_('certificate chain is incomplete, updating... '))
2473 ui.status(_('certificate chain is incomplete, updating... '))
2475
2474
2476 if not win32.checkcertificatechain(cert):
2475 if not win32.checkcertificatechain(cert):
2477 ui.status(_('failed.\n'))
2476 ui.status(_('failed.\n'))
2478 else:
2477 else:
2479 ui.status(_('done.\n'))
2478 ui.status(_('done.\n'))
2480 else:
2479 else:
2481 ui.status(_('full certificate chain is available\n'))
2480 ui.status(_('full certificate chain is available\n'))
2482 finally:
2481 finally:
2483 s.close()
2482 s.close()
2484
2483
2485 @command('debugsub',
2484 @command('debugsub',
2486 [('r', 'rev', '',
2485 [('r', 'rev', '',
2487 _('revision to check'), _('REV'))],
2486 _('revision to check'), _('REV'))],
2488 _('[-r REV] [REV]'))
2487 _('[-r REV] [REV]'))
2489 def debugsub(ui, repo, rev=None):
2488 def debugsub(ui, repo, rev=None):
2490 ctx = scmutil.revsingle(repo, rev, None)
2489 ctx = scmutil.revsingle(repo, rev, None)
2491 for k, v in sorted(ctx.substate.items()):
2490 for k, v in sorted(ctx.substate.items()):
2492 ui.write(('path %s\n') % k)
2491 ui.write(('path %s\n') % k)
2493 ui.write((' source %s\n') % v[0])
2492 ui.write((' source %s\n') % v[0])
2494 ui.write((' revision %s\n') % v[1])
2493 ui.write((' revision %s\n') % v[1])
2495
2494
2496 @command('debugsuccessorssets',
2495 @command('debugsuccessorssets',
2497 [('', 'closest', False, _('return closest successors sets only'))],
2496 [('', 'closest', False, _('return closest successors sets only'))],
2498 _('[REV]'))
2497 _('[REV]'))
2499 def debugsuccessorssets(ui, repo, *revs, **opts):
2498 def debugsuccessorssets(ui, repo, *revs, **opts):
2500 """show set of successors for revision
2499 """show set of successors for revision
2501
2500
2502 A successors set of changeset A is a consistent group of revisions that
2501 A successors set of changeset A is a consistent group of revisions that
2503 succeed A. It contains non-obsolete changesets only, unless the
2502 succeed A. It contains non-obsolete changesets only, unless the
2504 --closest option is used.
2503 --closest option is used.
2505
2504
2506 In most cases a changeset A has a single successors set containing a single
2505 In most cases a changeset A has a single successors set containing a single
2507 successor (changeset A replaced by A').
2506 successor (changeset A replaced by A').
2508
2507
2509 A changeset that is made obsolete with no successors is called "pruned".
2508 A changeset that is made obsolete with no successors is called "pruned".
2510 Such changesets have no successors sets at all.
2509 Such changesets have no successors sets at all.
2511
2510
2512 A changeset that has been "split" will have a successors set containing
2511 A changeset that has been "split" will have a successors set containing
2513 more than one successor.
2512 more than one successor.
2514
2513
2515 A changeset that has been rewritten in multiple different ways is called
2514 A changeset that has been rewritten in multiple different ways is called
2516 "divergent". Such changesets have multiple successor sets (each of which
2515 "divergent". Such changesets have multiple successor sets (each of which
2517 may also be split, i.e. have multiple successors).
2516 may also be split, i.e. have multiple successors).
2518
2517
2519 Results are displayed as follows::
2518 Results are displayed as follows::
2520
2519
2521 <rev1>
2520 <rev1>
2522 <successors-1A>
2521 <successors-1A>
2523 <rev2>
2522 <rev2>
2524 <successors-2A>
2523 <successors-2A>
2525 <successors-2B1> <successors-2B2> <successors-2B3>
2524 <successors-2B1> <successors-2B2> <successors-2B3>
2526
2525
2527 Here rev2 has two possible (i.e. divergent) successors sets. The first
2526 Here rev2 has two possible (i.e. divergent) successors sets. The first
2528 holds one element, whereas the second holds three (i.e. the changeset has
2527 holds one element, whereas the second holds three (i.e. the changeset has
2529 been split).
2528 been split).
2530 """
2529 """
2531 # passed to successorssets caching computation from one call to another
2530 # passed to successorssets caching computation from one call to another
2532 cache = {}
2531 cache = {}
2533 ctx2str = bytes
2532 ctx2str = bytes
2534 node2str = short
2533 node2str = short
2535 for rev in scmutil.revrange(repo, revs):
2534 for rev in scmutil.revrange(repo, revs):
2536 ctx = repo[rev]
2535 ctx = repo[rev]
2537 ui.write('%s\n'% ctx2str(ctx))
2536 ui.write('%s\n'% ctx2str(ctx))
2538 for succsset in obsutil.successorssets(repo, ctx.node(),
2537 for succsset in obsutil.successorssets(repo, ctx.node(),
2539 closest=opts[r'closest'],
2538 closest=opts[r'closest'],
2540 cache=cache):
2539 cache=cache):
2541 if succsset:
2540 if succsset:
2542 ui.write(' ')
2541 ui.write(' ')
2543 ui.write(node2str(succsset[0]))
2542 ui.write(node2str(succsset[0]))
2544 for node in succsset[1:]:
2543 for node in succsset[1:]:
2545 ui.write(' ')
2544 ui.write(' ')
2546 ui.write(node2str(node))
2545 ui.write(node2str(node))
2547 ui.write('\n')
2546 ui.write('\n')
2548
2547
2549 @command('debugtemplate',
2548 @command('debugtemplate',
2550 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2549 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2551 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2550 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2552 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2551 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2553 optionalrepo=True)
2552 optionalrepo=True)
2554 def debugtemplate(ui, repo, tmpl, **opts):
2553 def debugtemplate(ui, repo, tmpl, **opts):
2555 """parse and apply a template
2554 """parse and apply a template
2556
2555
2557 If -r/--rev is given, the template is processed as a log template and
2556 If -r/--rev is given, the template is processed as a log template and
2558 applied to the given changesets. Otherwise, it is processed as a generic
2557 applied to the given changesets. Otherwise, it is processed as a generic
2559 template.
2558 template.
2560
2559
2561 Use --verbose to print the parsed tree.
2560 Use --verbose to print the parsed tree.
2562 """
2561 """
2563 revs = None
2562 revs = None
2564 if opts[r'rev']:
2563 if opts[r'rev']:
2565 if repo is None:
2564 if repo is None:
2566 raise error.RepoError(_('there is no Mercurial repository here '
2565 raise error.RepoError(_('there is no Mercurial repository here '
2567 '(.hg not found)'))
2566 '(.hg not found)'))
2568 revs = scmutil.revrange(repo, opts[r'rev'])
2567 revs = scmutil.revrange(repo, opts[r'rev'])
2569
2568
2570 props = {}
2569 props = {}
2571 for d in opts[r'define']:
2570 for d in opts[r'define']:
2572 try:
2571 try:
2573 k, v = (e.strip() for e in d.split('=', 1))
2572 k, v = (e.strip() for e in d.split('=', 1))
2574 if not k or k == 'ui':
2573 if not k or k == 'ui':
2575 raise ValueError
2574 raise ValueError
2576 props[k] = v
2575 props[k] = v
2577 except ValueError:
2576 except ValueError:
2578 raise error.Abort(_('malformed keyword definition: %s') % d)
2577 raise error.Abort(_('malformed keyword definition: %s') % d)
2579
2578
2580 if ui.verbose:
2579 if ui.verbose:
2581 aliases = ui.configitems('templatealias')
2580 aliases = ui.configitems('templatealias')
2582 tree = templater.parse(tmpl)
2581 tree = templater.parse(tmpl)
2583 ui.note(templater.prettyformat(tree), '\n')
2582 ui.note(templater.prettyformat(tree), '\n')
2584 newtree = templater.expandaliases(tree, aliases)
2583 newtree = templater.expandaliases(tree, aliases)
2585 if newtree != tree:
2584 if newtree != tree:
2586 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2585 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2587
2586
2588 if revs is None:
2587 if revs is None:
2589 tres = formatter.templateresources(ui, repo)
2588 tres = formatter.templateresources(ui, repo)
2590 t = formatter.maketemplater(ui, tmpl, resources=tres)
2589 t = formatter.maketemplater(ui, tmpl, resources=tres)
2591 if ui.verbose:
2590 if ui.verbose:
2592 kwds, funcs = t.symbolsuseddefault()
2591 kwds, funcs = t.symbolsuseddefault()
2593 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2592 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2594 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2593 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2595 ui.write(t.renderdefault(props))
2594 ui.write(t.renderdefault(props))
2596 else:
2595 else:
2597 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2596 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2598 if ui.verbose:
2597 if ui.verbose:
2599 kwds, funcs = displayer.t.symbolsuseddefault()
2598 kwds, funcs = displayer.t.symbolsuseddefault()
2600 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2599 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2601 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2600 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2602 for r in revs:
2601 for r in revs:
2603 displayer.show(repo[r], **pycompat.strkwargs(props))
2602 displayer.show(repo[r], **pycompat.strkwargs(props))
2604 displayer.close()
2603 displayer.close()
2605
2604
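# Illustrative sketch, not part of the original module: a hypothetical
# invocation of debugtemplate. With -r the template is rendered once per
# changeset, and -D definitions become extra template keywords:
#
#   $ hg debugtemplate -r . -D greeting=hello '{greeting}: {node|short}\n'
#   hello: 0123456789ab
#
# (the short hash shown above is a placeholder, not real output)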
@command('debuguigetpass', [
    ('p', 'prompt', '', _('prompt text'), _('TEXT')),
], _('[-p TEXT]'), norepo=True)
def debuguigetpass(ui, prompt=''):
    """show prompt to type password"""
    r = ui.getpass(prompt)
    ui.write(('response: %s\n') % r)

@command('debuguiprompt', [
    ('p', 'prompt', '', _('prompt text'), _('TEXT')),
], _('[-p TEXT]'), norepo=True)
def debuguiprompt(ui, prompt=''):
    """show plain prompt"""
    r = ui.prompt(prompt)
    ui.write(('response: %s\n') % r)

@command('debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    with repo.wlock(), repo.lock():
        repo.updatecaches(full=True)

@command('debugupgraderepo', [
    ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
    ('', 'run', False, _('performs an upgrade')),
])
def debugupgraderepo(ui, repo, run=False, optimize=None):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines,
    this should complete almost instantaneously and the chances of a consumer
    being unable to access the repository should be low.
    """
    return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)

@command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
         inferrepo=True)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
    items = list(repo[None].walk(m))
    if not items:
        return
    f = lambda fn: fn
    if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
        f = lambda fn: util.normpath(fn)
    fmt = 'f %%-%ds %%-%ds %%s' % (
        max([len(abs) for abs in items]),
        max([len(m.rel(abs)) for abs in items]))
    for abs in items:
        line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
        ui.write("%s\n" % line.rstrip())

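# Illustrative sketch, not part of the original module: the fmt string built
# above pads the repository-relative and cwd-relative paths to the width of
# the longest entry, so hypothetical output could look roughly like:
#
#   f beans/black ../beans/black
#   f fenugreek   ../fenugreek    exact
#
# (file names, relative paths, and the 'exact' flag are made up here)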
@command('debugwhyunstable', [], _('REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
        dnodes = ''
        if entry.get('divergentnodes'):
            dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
                              for ctx in entry['divergentnodes']) + ' '
        ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
                                    entry['reason'], entry['node']))

@command('debugwireargs',
    [('', 'three', '', 'three'),
     ('', 'four', '', 'four'),
     ('', 'five', '', 'five'),
    ] + cmdutil.remoteopts,
    _('REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    for opt in cmdutil.remoteopts:
        del opts[opt[1]]
    args = {}
    for k, v in opts.iteritems():
        if v:
            args[k] = v
    args = pycompat.strkwargs(args)
    # run twice to check that we don't mess up the stream for the next command
    res1 = repo.debugwireargs(*vals, **args)
    res2 = repo.debugwireargs(*vals, **args)
    ui.write("%s\n" % res1)
    if res1 != res2:
        ui.warn("%s\n" % res2)

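# Illustrative sketch, not part of the original module: debugwireargs echoes
# back the positional and named arguments as the peer received them, so a
# hypothetical invocation might look like:
#
#   $ hg debugwireargs repo one two --three adv
#   one two adv None None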
def _parsewirelangblocks(fh):
    activeaction = None
    blocklines = []

    for line in fh:
        line = line.rstrip()
        if not line:
            continue

        if line.startswith(b'#'):
            continue

        if not line.startswith(' '):
            # New block. Flush previous one.
            if activeaction:
                yield activeaction, blocklines

            activeaction = line
            blocklines = []
            continue

        # Else we start with an indent.

        if not activeaction:
            raise error.Abort(_('indented line outside of block'))

        blocklines.append(line)

    # Flush last block.
    if activeaction:
        yield activeaction, blocklines

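# Illustrative sketch, not part of the original module: fed the hypothetical
# input below, the parser above yields one (action, lines) pair per
# unindented line, with the indented lines that follow collected verbatim:
#
#   >>> import io
#   >>> data = b'command listkeys\n    namespace bookmarks\nreadavailable\n'
#   >>> list(_parsewirelangblocks(io.BytesIO(data)))
#   [('command listkeys', ['    namespace bookmarks']), ('readavailable', [])]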
@command('debugwireproto',
    [
        ('', 'localssh', False, _('start an SSH server for this repo')),
        ('', 'peer', '', _('construct a specific version of the peer')),
        ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
        ('', 'nologhandshake', False,
         _('do not log I/O related to the peer handshake')),
    ] + cmdutil.remoteopts,
    _('[PATH]'),
    optionalrepo=True)
def debugwireproto(ui, repo, path=None, **opts):
    """send wire protocol commands to a server

    This command can be used to issue wire protocol commands to remote
    peers and to debug the raw data being exchanged.

    ``--localssh`` will start an SSH server against the current repository
    and connect to that. By default, the connection will perform a handshake
    and establish an appropriate peer instance.

    ``--peer`` can be used to bypass the handshake protocol and construct a
    peer instance using the specified class type. Valid values are ``raw``,
    ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
    raw data payloads and don't support higher-level command actions.

    ``--noreadstderr`` can be used to disable automatic reading from stderr
    of the peer (for SSH connections only). Disabling automatic reading of
    stderr is useful for making output more deterministic.

    Commands are issued via a mini language which is specified via stdin.
    The language consists of individual actions to perform. An action is
    defined by a block. A block is defined as a line with no leading
    space followed by 0 or more lines with leading space. Blocks are
    effectively a high-level command with additional metadata.

    Lines beginning with ``#`` are ignored.

    The following sections denote available actions.

    raw
    ---

    Send raw data to the server.

    The block payload contains the raw data to send as one atomic send
    operation. The data may not actually be delivered in a single system
    call: it depends on the abilities of the transport being used.

    Each line in the block is de-indented and concatenated. Then, that
    value is evaluated as a Python b'' literal. This allows the use of
    backslash escaping, etc.

    raw+
    ----

    Behaves like ``raw`` except flushes output afterwards.

    command <X>
    -----------

    Send a request to run a named command, whose name follows the ``command``
    string.

    Arguments to the command are defined as lines in this block. The format of
    each line is ``<key> <value>``. e.g.::

        command listkeys
            namespace bookmarks

    If the value begins with ``eval:``, it will be interpreted as a Python
    literal expression. Otherwise values are interpreted as Python b'' literals.
    This allows sending complex types and encoding special byte sequences via
    backslash escaping.

    The following arguments have special meaning:

    ``PUSHFILE``
        When defined, the *push* mechanism of the peer will be used instead
        of the static request-response mechanism and the content of the
        file specified in the value of this argument will be sent as the
        command payload.

        This can be used to submit a local bundle file to the remote.

    batchbegin
    ----------

    Instruct the peer to begin a batched send.

    All ``command`` blocks are queued for execution until the next
    ``batchsubmit`` block.

    batchsubmit
    -----------

    Submit previously queued ``command`` blocks as a batch request.

    This action MUST be paired with a ``batchbegin`` action.

    httprequest <method> <path>
    ---------------------------

    (HTTP peer only)

    Send an HTTP request to the peer.

    The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.

    Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
    headers to add to the request. e.g. ``Accept: foo``.

    The following arguments are special:

    ``BODYFILE``
        The content of the file defined as the value to this argument will be
        transferred verbatim as the HTTP request body.

    ``frame <type> <flags> <payload>``
        Send a unified protocol frame as part of the request body.

        All frames will be collected and sent as the body to the HTTP
        request.

    close
    -----

    Close the connection to the server.

    flush
    -----

    Flush data written to the server.

    readavailable
    -------------

    Close the write end of the connection and read all available data from
    the server.

    If the connection to the server encompasses multiple pipes, we poll both
    pipes and read available data.

    readline
    --------

    Read a line of output from the server. If there are multiple output
    pipes, reads only the main pipe.

    ereadline
    ---------

    Like ``readline``, but read from the stderr pipe, if available.

    read <X>
    --------

    ``read()`` N bytes from the server's main output pipe.

    eread <X>
    ---------

    ``read()`` N bytes from the server's stderr pipe, if available.

    Specifying Unified Frame-Based Protocol Frames
    ----------------------------------------------

    It is possible to emit *Unified Frame-Based Protocol* frames by using
    special syntax.

    A frame is composed of a type, flags, and a payload. These can be parsed
    from a string of the form:

       <request-id> <stream-id> <stream-flags> <type> <flags> <payload>

    ``request-id`` and ``stream-id`` are integers defining the request and
    stream identifiers.

    ``type`` can be an integer value for the frame type or the string name
    of the type. The strings are defined in ``wireprotoframing.py``. e.g.
    ``command-name``.

    ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
    components. Each component (and there can be just one) can be an integer
    or a flag name for stream flags or frame flags, respectively. Values are
    resolved to integers and then bitwise OR'd together.

    ``payload`` represents the raw frame payload. If it begins with
    ``cbor:``, the following string is evaluated as Python code and the
    resulting object is fed into a CBOR encoder. Otherwise it is interpreted
    as a Python byte string literal.
    """
    opts = pycompat.byteskwargs(opts)

    if opts['localssh'] and not repo:
        raise error.Abort(_('--localssh requires a repository'))

    if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
        raise error.Abort(_('invalid value for --peer'),
                          hint=_('valid values are "raw", "http2", "ssh1", '
                                 'and "ssh2"'))

    if path and opts['localssh']:
        raise error.Abort(_('cannot specify --localssh with an explicit '
                            'path'))

    if ui.interactive():
        ui.write(_('(waiting for commands on stdin)\n'))

    blocks = list(_parsewirelangblocks(ui.fin))

    proc = None
    stdin = None
    stdout = None
    stderr = None
    opener = None

    if opts['localssh']:
        # We start the SSH server in its own process so there is process
        # separation. This prevents a whole class of potential bugs around
        # shared state from interfering with server operation.
        args = procutil.hgcmd() + [
            '-R', repo.root,
            'debugserve', '--sshstdio',
        ]
        proc = subprocess.Popen(args, stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                                bufsize=0)

        stdin = proc.stdin
        stdout = proc.stdout
        stderr = proc.stderr

        # We turn the pipes into observers so we can log I/O.
        if ui.verbose or opts['peer'] == 'raw':
            stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
                                               logdata=True)
            stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
                                                logdata=True)
            stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
                                                logdata=True)

        # --localssh also implies the peer connection settings.

        url = 'ssh://localserver'
        autoreadstderr = not opts['noreadstderr']

        if opts['peer'] == 'ssh1':
            ui.write(_('creating ssh peer for wire protocol version 1\n'))
            peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
                                     None, autoreadstderr=autoreadstderr)
        elif opts['peer'] == 'ssh2':
            ui.write(_('creating ssh peer for wire protocol version 2\n'))
            peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
                                     None, autoreadstderr=autoreadstderr)
        elif opts['peer'] == 'raw':
            ui.write(_('using raw connection to peer\n'))
            peer = None
        else:
            ui.write(_('creating ssh peer from handshake results\n'))
            peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
                                    autoreadstderr=autoreadstderr)

    elif path:
        # We bypass hg.peer() so we can proxy the sockets.
        # TODO consider not doing this because we skip
        # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
        u = util.url(path)
        if u.scheme != 'http':
            raise error.Abort(_('only http:// paths are currently supported'))

        url, authinfo = u.authinfo()
        openerargs = {
            r'useragent': b'Mercurial debugwireproto',
        }

        # Turn pipes/sockets into observers so we can log I/O.
        if ui.verbose:
            openerargs.update({
                r'loggingfh': ui,
                r'loggingname': b's',
                r'loggingopts': {
                    r'logdata': True,
                    r'logdataapis': False,
                },
            })

        if ui.debugflag:
            openerargs[r'loggingopts'][r'logdataapis'] = True

        # Don't send default headers when in raw mode. This allows us to
        # bypass most of the behavior of our URL handling code so we can
        # have near complete control over what's sent on the wire.
        if opts['peer'] == 'raw':
            openerargs[r'sendaccept'] = False

        opener = urlmod.opener(ui, authinfo, **openerargs)

        if opts['peer'] == 'http2':
            ui.write(_('creating http peer for wire protocol version 2\n'))
            # We go through makepeer() because we need an API descriptor for
            # the peer instance to be useful.
            with ui.configoverride({
                    ('experimental', 'httppeer.advertise-v2'): True}):
                if opts['nologhandshake']:
                    ui.pushbuffer()

                peer = httppeer.makepeer(ui, path, opener=opener)

                if opts['nologhandshake']:
                    ui.popbuffer()

            if not isinstance(peer, httppeer.httpv2peer):
                raise error.Abort(_('could not instantiate HTTP peer for '
                                    'wire protocol version 2'),
                                  hint=_('the server may not have the feature '
                                         'enabled or is not allowing this '
                                         'client version'))

        elif opts['peer'] == 'raw':
            ui.write(_('using raw connection to peer\n'))
            peer = None
        elif opts['peer']:
            raise error.Abort(_('--peer %s not supported with HTTP peers') %
                              opts['peer'])
        else:
            peer = httppeer.makepeer(ui, path, opener=opener)

        # We /could/ populate stdin/stdout with sock.makefile()...
    else:
        raise error.Abort(_('unsupported connection configuration'))

    batchedcommands = None

    # Now perform actions based on the parsed wire language instructions.
    for action, lines in blocks:
        if action in ('raw', 'raw+'):
            if not stdin:
                raise error.Abort(_('cannot call raw/raw+ on this peer'))

            # Concatenate the data together.
            data = ''.join(l.lstrip() for l in lines)
            data = stringutil.unescapestr(data)
            stdin.write(data)

            if action == 'raw+':
                stdin.flush()
        elif action == 'flush':
            if not stdin:
                raise error.Abort(_('cannot call flush on this peer'))
            stdin.flush()
        elif action.startswith('command'):
            if not peer:
                raise error.Abort(_('cannot send commands unless peer instance '
                                    'is available'))

            command = action.split(' ', 1)[1]

            args = {}
            for line in lines:
                # We need to allow empty values.
                fields = line.lstrip().split(' ', 1)
                if len(fields) == 1:
                    key = fields[0]
                    value = ''
                else:
                    key, value = fields

                if value.startswith('eval:'):
                    value = stringutil.evalpythonliteral(value[5:])
                else:
                    value = stringutil.unescapestr(value)

                args[key] = value

            if batchedcommands is not None:
                batchedcommands.append((command, args))
                continue

            ui.status(_('sending %s command\n') % command)

            if 'PUSHFILE' in args:
                with open(args['PUSHFILE'], r'rb') as fh:
                    del args['PUSHFILE']
                    res, output = peer._callpush(command, fh,
                                                 **pycompat.strkwargs(args))
                    ui.status(_('result: %s\n') % stringutil.escapestr(res))
                    ui.status(_('remote output: %s\n') %
                              stringutil.escapestr(output))
            else:
                with peer.commandexecutor() as e:
                    res = e.callcommand(command, args).result()

                if isinstance(res, wireprotov2peer.commandresponse):
                    val = list(res.cborobjects())
                    ui.status(_('response: %s\n') %
                              stringutil.pprint(val, bprefix=True))

                else:
                    ui.status(_('response: %s\n') %
                              stringutil.pprint(res, bprefix=True))

        elif action == 'batchbegin':
            if batchedcommands is not None:
                raise error.Abort(_('nested batchbegin not allowed'))

            batchedcommands = []
        elif action == 'batchsubmit':
            # There is a batching API we could go through. But it would be
            # difficult to normalize requests into function calls. It is easier
            # to bypass this layer and normalize to commands + args.
            ui.status(_('sending batch with %d sub-commands\n') %
                      len(batchedcommands))
            for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
                ui.status(_('response #%d: %s\n') %
                          (i, stringutil.escapestr(chunk)))

            batchedcommands = None

        elif action.startswith('httprequest '):
            if not opener:
                raise error.Abort(_('cannot use httprequest without an HTTP '
                                    'peer'))

            request = action.split(' ', 2)
            if len(request) != 3:
                raise error.Abort(_('invalid httprequest: expected format is '
                                    '"httprequest <method> <path>'))

            method, httppath = request[1:]
            headers = {}
            body = None
            frames = []
            for line in lines:
                line = line.lstrip()
                m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
                if m:
                    headers[m.group(1)] = m.group(2)
                    continue

                if line.startswith(b'BODYFILE '):
                    with open(line.split(b' ', 1)[1], 'rb') as fh:
                        body = fh.read()
                elif line.startswith(b'frame '):
                    frame = wireprotoframing.makeframefromhumanstring(
                        line[len(b'frame '):])

                    frames.append(frame)
                else:
                    raise error.Abort(_('unknown argument to httprequest: %s') %
                                      line)

            url = path + httppath

            if frames:
                body = b''.join(bytes(f) for f in frames)

            req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)

            # urllib.Request insists on using has_data() as a proxy for
            # determining the request method. Override that to use our
            # explicitly requested method.
            req.get_method = lambda: method

            try:
                res = opener.open(req)
                body = res.read()
            except util.urlerr.urlerror as e:
                e.read()
                continue

            if res.headers.get('Content-Type') == 'application/mercurial-cbor':
                ui.write(_('cbor> %s\n') %
                         stringutil.pprint(cbor.loads(body), bprefix=True))

        elif action == 'close':
            peer.close()
        elif action == 'readavailable':
            if not stdout or not stderr:
                raise error.Abort(_('readavailable not available on this peer'))

            stdin.close()
            stdout.read()
            stderr.read()

        elif action == 'readline':
            if not stdout:
                raise error.Abort(_('readline not available on this peer'))
            stdout.readline()
        elif action == 'ereadline':
            if not stderr:
                raise error.Abort(_('ereadline not available on this peer'))
            stderr.readline()
        elif action.startswith('read '):
            count = int(action.split(' ', 1)[1])
            if not stdout:
                raise error.Abort(_('read not available on this peer'))
            stdout.read(count)
        elif action.startswith('eread '):
            count = int(action.split(' ', 1)[1])
            if not stderr:
                raise error.Abort(_('eread not available on this peer'))
            stderr.read(count)
        else:
            raise error.Abort(_('unknown action: %s') % action)

    if batchedcommands is not None:
        raise error.Abort(_('unclosed "batchbegin" request'))

    if peer:
        peer.close()

    if proc:
        proc.kill()
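# Illustrative sketch, not part of the original module: a hypothetical script
# in the mini language documented by debugwireproto's docstring, as it might
# be piped to `hg debugwireproto --localssh` on stdin:
#
#   command listkeys
#       namespace bookmarks
#   batchbegin
#   command heads
#   command listkeys
#       namespace phases
#   batchsubmit
#   close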