debugcommands: introduce actions to perform deterministic reads...
Gregory Szorc, r37025:143219fc (branch: default)
@@ -1,2868 +1,2891 @@
# debugcommands.py - command processing for debug* commands
#
# Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import codecs
import collections
import difflib
import errno
import operator
import os
import random
import socket
import ssl
import stat
import string
import subprocess
import sys
import tempfile
import time

from .i18n import _
from .node import (
    bin,
    hex,
    nullhex,
    nullid,
    nullrev,
    short,
)
from . import (
    bundle2,
    changegroup,
    cmdutil,
    color,
    context,
    dagparser,
    dagutil,
    encoding,
    error,
    exchange,
    extensions,
    filemerge,
    fileset,
    formatter,
    hg,
    localrepo,
    lock as lockmod,
    logcmdutil,
    merge as mergemod,
    obsolete,
    obsutil,
    phases,
    policy,
    pvec,
    pycompat,
    registrar,
    repair,
    revlog,
    revset,
    revsetlang,
    scmutil,
    setdiscovery,
    simplemerge,
    smartset,
    sshpeer,
    sslutil,
    streamclone,
    templater,
    treediscovery,
    upgrade,
    url as urlmod,
    util,
    vfs as vfsmod,
    wireprotoserver,
)
from .utils import dateutil

release = lockmod.release

command = registrar.command()

@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    if len(args) == 3:
        index, rev1, rev2 = args
        r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
        lookup = r.lookup
    elif len(args) == 2:
        if not repo:
            raise error.Abort(_('there is no Mercurial repository here '
                                '(.hg not found)'))
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_('either two or three arguments required'))
    a = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write('%d:%s\n' % (r.rev(a), hex(a)))

@command('debugapplystreamclonebundle', [], 'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    f = hg.openpath(ui, fname)
    gen = exchange.readbundle(ui, f, fname)
    gen.apply(repo)

@command('debugbuilddag',
    [('m', 'mergeable-file', None, _('add single file mergeable changes')),
     ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
     ('n', 'new-file', None, _('add new file at each rev'))],
    _('[OPTION]... [TEXT]'))
def debugbuilddag(ui, repo, text=None,
                  mergeable_file=False,
                  overwritten_file=False,
                  new_file=False):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_("reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_('repository is not empty'))

    # determine number of revs in DAG
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == 'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = ['%d' % i for i in xrange(0, total * linesperrev)]
        initialmergedlines.append("")

    tags = []

    wlock = lock = tr = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        tr = repo.transaction("builddag")

        at = -1
        atbranch = 'default'
        nodeids = []
        id = 0
        ui.progress(_('building'), id, unit=_('revisions'), total=total)
        for type, data in dagparser.parsedag(text):
            if type == 'n':
                ui.note(('node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = "mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [x[fn].data() for x in (pa, p1,
                                                                     p2)]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append("")
                    elif at > 0:
                        ml = p1[fn].data().split("\n")
                    else:
                        ml = initialmergedlines
                    ml[id * linesperrev] += " r%i" % id
                    mergedtext = "\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = "of"
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id

                if new_file:
                    fn = "nf%i" % id
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith("nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(repo, cx, path,
                                                  filecontent[path])
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
                                    date=(id, 0),
                                    user="debugbuilddag",
                                    extra={'branch': atbranch})
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == 'l':
                id, name = data
                ui.note(('tag %s\n' % name))
                tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == 'a':
                ui.note(('branch %s\n' % data))
                atbranch = data
            ui.progress(_('building'), id, unit=_('revisions'), total=total)
        tr.close()

        if tags:
            repo.vfs.write("localtags", "".join(tags))
    finally:
        ui.progress(_('building'), None)
        release(tr, lock, wlock)

def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    indent_string = ' ' * indent
    if all:
        ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
                 % indent_string)

        def showchunks(named):
            ui.write("\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write("%s%s %s %s %s %s %d\n" %
                         (indent_string, hex(node), hex(p1), hex(p2),
                          hex(cs), hex(deltabase), len(delta)))

        chunkdata = gen.changelogheader()
        showchunks("changelog")
        chunkdata = gen.manifestheader()
        showchunks("manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata['filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_('use debugbundle2 for this file'))
        chunkdata = gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write("%s%s\n" % (indent_string, hex(node)))

def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = ' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        msg = "%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = "%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        fm = ui.formatter('debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()

def _debugphaseheads(ui, data, indent=0):
    """display version and markers contained in 'data'"""
    indent_string = ' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(indent_string)
            ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))

def _quasirepr(thing):
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        return '{%s}' % (
            b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
    return pycompat.bytestr(repr(thing))

def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_('not a bundle2 file'))
    ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
    parttypes = opts.get(r'part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        ui.write('%s -- %s\n' % (part.type, _quasirepr(part.params)))
        if part.type == 'changegroup':
            version = part.params.get('version', '01')
            cg = changegroup.getunbundler(version, part, 'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == 'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == 'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)

@command('debugbundle',
    [('a', 'all', None, _('show all details')),
     ('', 'part-type', [], _('show only the named part type')),
     ('', 'spec', None, _('print the bundlespec of the bundle'))],
    _('FILE'),
    norepo=True)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            spec = exchange.getbundlespec(ui, f)
            ui.write('%s\n' % spec)
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)

@command('debugcapabilities',
    [], _('PATH'),
    norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    caps = peer.capabilities()
    ui.write(('Main capabilities:\n'))
    for c in sorted(caps):
        ui.write(('  %s\n') % c)
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.write(('Bundle2 capabilities:\n'))
        for key, values in sorted(b2caps.iteritems()):
            ui.write(('  %s\n') % key)
            for v in values:
                ui.write(('    %s\n') % v)

@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        errstr = _(".hg/dirstate inconsistent with current parent's "
                   "manifest")
        raise error.Abort(errstr)

@command('debugcolor',
    [('', 'style', None, _('show all configured styles'))],
    'hg debugcolor')
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.write(('color mode: %s\n') % ui._colormode)
    if opts.get(r'style'):
        return _debugdisplaystyle(ui)
    else:
        return _debugdisplaycolor(ui)

def _debugdisplaycolor(ui):
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for k, v in ui.configitems('color'):
            if k.startswith('color.'):
                ui._styles[k] = k[6:]
            elif k.startswith('terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_('available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(),
                   key=lambda i: ('_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(('%s\n') % colorname, label=label)

def _debugdisplaystyle(ui):
    ui.write(_('available style:\n'))
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write('%s' % label, label=label)
        if effects:
            # 50
            ui.write(': ')
            ui.write(' ' * (max(0, width - len(label))))
            ui.write(', '.join(ui.label(e, e) for e in effects.split()))
        ui.write('\n')

@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(_('(warning: stream clone bundle will contain secret '
                  'revisions)\n'))

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))

@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
     ('b', 'branches', None, _('annotate with branch names')),
     ('', 'dots', None, _('use dots for runs')),
     ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get(r'spaces')
    dots = opts.get(r'dots')
    if file_:
        rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                             file_)
        revs = set((int(r) for r in revs))
        def events():
            for r in rlog:
                yield 'n', (r, list(p for p in rlog.parentrevs(r)
                                    if p != -1))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        cl = repo.changelog
        tags = opts.get(r'tags')
        branches = opts.get(r'branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            b = "default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(p for p in cl.parentrevs(r)
                                    if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
        ui.write("\n")

@command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
    try:
        ui.write(r.revision(r.lookup(rev), raw=True))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)

@command('debugdate',
    [('e', 'extended', None, _('try extended date formats'))],
    _('[-e] DATE [RANGE]'),
    norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts[r"extended"]:
        d = dateutil.parsedate(date, util.extendeddateformats)
    else:
        d = dateutil.parsedate(date)
    ui.write(("internal: %d %d\n") % d)
    ui.write(("standard: %s\n") % dateutil.datestr(d))
    if range:
        m = dateutil.matchdate(range)
        ui.write(("match: %s\n") % m(d[0]))

@command('debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``:       revision number
    :``chainid``:   delta chain identifier (numbered by unique base)
    :``chainlen``:  delta chain length to this revision
    :``prevrev``:   previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``:  compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                    (new delta chains typically start at ratio 2.00)
    :``lindist``:   linear distance from base revision in delta chain to end
                    of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                    how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``:     total size of data read from the disk for a revision
                       (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``:  density of useful bytes in the data read from the disk
    :``srchunks``:     in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            if e[3] == e[5]:
                deltatype = 'p1'
            elif e[3] == e[6]:
                deltatype = 'p2'
            elif e[3] == rev - 1:
                deltatype = 'prev'
            elif e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            if e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    fm.plain('    rev  chain# chainlen     prev   delta       '
             'size    rawsize  chainsize     ratio   lindist extradist '
             'extraratio')
    if withsparseread:
        fm.plain('   readsize largestblk rddensity srchunks')
    fm.plain('\n')

    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        start = r.start
        length = r.length
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        chainratio = float(chainsize) / float(uncomp)
        extraratio = float(extradist) / float(chainsize)

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in revlog._slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            readdensity = float(chainsize) / float(readsize)

            fm.write('readsize largestblock readdensity srchunks',
                     ' %10d %10d %9.5f %8d',
                     readsize, largestblock, readdensity, srchunks,
                     readsize=readsize, largestblock=largestblock,
                     readdensity=readdensity, srchunks=srchunks)

        fm.plain('\n')

    fm.end()

@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime')),
     ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = opts.get(r'nodates')
    datesort = opts.get(r'datesort')

    timestr = ""
    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        if ent[3] == -1:
            timestr = 'unset               '
        elif nodates:
            timestr = 'set                 '
        else:
            timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
                                    time.localtime(ent[3]))
            timestr = encoding.strtolocal(timestr)
        if ent[1] & 0o20000:
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))

@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
     ('', 'nonheads', None,
      _('use old-style discovery with non-heads included')),
     ('', 'rev', [], 'restrict discovery to this set of revs'),
    ] + cmdutil.remoteopts,
    _('[--rev REV] [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(pushedrevs, remoteheads, remote=remote):
        if opts.get('old'):
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))
                dag = dagutil.revlogdag(repo.changelog)
                all = dag.ancestorset(dag.internalizeall(common))
                common = dag.externalizeall(dag.headsetofconnecteds(all))
        else:
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
                                                            ancestorsof=nodes)
        common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
        if lheads <= common:
            ui.write(("local is subset\n"))
        elif rheads <= common:
            ui.write(("remote is subset\n"))

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts['rev']
    doit(localrevs, remoterevs)

_chunksize = 4 << 10

@command('debugdownload',
    [
        ('o', 'output', '', _('path')),
    ],
    optionalrepo=True)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config
    """
    fh = urlmod.open(ui, url, output)

    dest = ui
    if output:
        dest = open(output, "wb", _chunksize)
    try:
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        if output:
            dest.close()

@command('debugextensions', cmdutil.formatteropts, [], norepo=True)
def debugextensions(ui, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter('debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = pycompat.fsencode(extmod.__file__)
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', '').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write('name', '%s\n', extname)
        else:
            fm.write('name', '%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain('\n')
            elif not exttestedwith:
                fm.plain(_(' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(' (%s!)\n' % lasttestedversion)

        fm.condwrite(ui.verbose and extsource, 'source',
                     _('  location: %s\n'), extsource or "")

        if ui.verbose:
            fm.plain(_('  bundled: %s\n') % ['no', 'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
                     _('  tested with: %s\n'),
                     fm.formatlist(exttestedwith, name='ver'))

        fm.condwrite(ui.verbose and extbuglink, 'buglink',
                     _('  bug reporting: %s\n'), extbuglink or "")

    fm.end()

@command('debugfileset',
    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
    _('[-r REV] FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
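    # Illustrative usage (not from the upstream source):
    #   hg debugfileset -r . 'added() or modified()'
    # prints the files in that revision's context matching the fileset.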
878 ctx = scmutil.revsingle(repo, opts.get(r'rev'), None)
878 ctx = scmutil.revsingle(repo, opts.get(r'rev'), None)
879 if ui.verbose:
879 if ui.verbose:
880 tree = fileset.parse(expr)
880 tree = fileset.parse(expr)
881 ui.note(fileset.prettyformat(tree), "\n")
881 ui.note(fileset.prettyformat(tree), "\n")
882
882
883 for f in ctx.getfileset(expr):
883 for f in ctx.getfileset(expr):
884 ui.write("%s\n" % f)
884 ui.write("%s\n" % f)
885
885
886 @command('debugformat',
886 @command('debugformat',
887 [] + cmdutil.formatteropts,
887 [] + cmdutil.formatteropts,
888 _(''))
888 _(''))
889 def debugformat(ui, repo, **opts):
889 def debugformat(ui, repo, **opts):
890 """display format information about the current repository
890 """display format information about the current repository
891
891
892 Use --verbose to get extra information about current config value and
892 Use --verbose to get extra information about current config value and
893 Mercurial default."""
893 Mercurial default."""
894 opts = pycompat.byteskwargs(opts)
894 opts = pycompat.byteskwargs(opts)
895 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
895 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
896 maxvariantlength = max(len('format-variant'), maxvariantlength)
896 maxvariantlength = max(len('format-variant'), maxvariantlength)
897
897
898 def makeformatname(name):
898 def makeformatname(name):
899 return '%s:' + (' ' * (maxvariantlength - len(name)))
899 return '%s:' + (' ' * (maxvariantlength - len(name)))
900
900
901 fm = ui.formatter('debugformat', opts)
901 fm = ui.formatter('debugformat', opts)
902 if fm.isplain():
902 if fm.isplain():
903 def formatvalue(value):
903 def formatvalue(value):
904 if util.safehasattr(value, 'startswith'):
904 if util.safehasattr(value, 'startswith'):
905 return value
905 return value
906 if value:
906 if value:
907 return 'yes'
907 return 'yes'
908 else:
908 else:
909 return 'no'
909 return 'no'
910 else:
910 else:
911 formatvalue = pycompat.identity
911 formatvalue = pycompat.identity
912
912
913 fm.plain('format-variant')
913 fm.plain('format-variant')
914 fm.plain(' ' * (maxvariantlength - len('format-variant')))
914 fm.plain(' ' * (maxvariantlength - len('format-variant')))
915 fm.plain(' repo')
915 fm.plain(' repo')
916 if ui.verbose:
916 if ui.verbose:
917 fm.plain(' config default')
917 fm.plain(' config default')
918 fm.plain('\n')
918 fm.plain('\n')
919 for fv in upgrade.allformatvariant:
919 for fv in upgrade.allformatvariant:
920 fm.startitem()
920 fm.startitem()
921 repovalue = fv.fromrepo(repo)
921 repovalue = fv.fromrepo(repo)
922 configvalue = fv.fromconfig(repo)
922 configvalue = fv.fromconfig(repo)
923
923
924 if repovalue != configvalue:
924 if repovalue != configvalue:
925 namelabel = 'formatvariant.name.mismatchconfig'
925 namelabel = 'formatvariant.name.mismatchconfig'
926 repolabel = 'formatvariant.repo.mismatchconfig'
926 repolabel = 'formatvariant.repo.mismatchconfig'
927 elif repovalue != fv.default:
927 elif repovalue != fv.default:
928 namelabel = 'formatvariant.name.mismatchdefault'
928 namelabel = 'formatvariant.name.mismatchdefault'
929 repolabel = 'formatvariant.repo.mismatchdefault'
929 repolabel = 'formatvariant.repo.mismatchdefault'
930 else:
930 else:
931 namelabel = 'formatvariant.name.uptodate'
931 namelabel = 'formatvariant.name.uptodate'
932 repolabel = 'formatvariant.repo.uptodate'
932 repolabel = 'formatvariant.repo.uptodate'
933
933
934 fm.write('name', makeformatname(fv.name), fv.name,
934 fm.write('name', makeformatname(fv.name), fv.name,
935 label=namelabel)
935 label=namelabel)
936 fm.write('repo', ' %3s', formatvalue(repovalue),
936 fm.write('repo', ' %3s', formatvalue(repovalue),
937 label=repolabel)
937 label=repolabel)
938 if fv.default != configvalue:
938 if fv.default != configvalue:
939 configlabel = 'formatvariant.config.special'
939 configlabel = 'formatvariant.config.special'
940 else:
940 else:
941 configlabel = 'formatvariant.config.default'
941 configlabel = 'formatvariant.config.default'
942 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
942 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
943 label=configlabel)
943 label=configlabel)
944 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
944 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
945 label='formatvariant.default')
945 label='formatvariant.default')
946 fm.plain('\n')
946 fm.plain('\n')
947 fm.end()
947 fm.end()
948
948
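# Illustrative sketch only (not part of the original file): how the table
# printed by debugformat above is assembled, assuming `repo` is an open
# localrepo. Each variant exposes the repository value, the configured value
# and the Mercurial default.
def _formatvariantexample(repo):
    rows = []
    for fv in upgrade.allformatvariant:
        rows.append((fv.name, fv.fromrepo(repo), fv.fromconfig(repo),
                     fv.default))
    return rows
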
949 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
950 def debugfsinfo(ui, path="."):
951 """show information detected about the current filesystem"""
952 ui.write(('path: %s\n') % path)
953 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
954 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
955 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
956 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
957 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
958 casesensitive = '(unknown)'
959 try:
960 with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
961 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
962 except OSError:
963 pass
964 ui.write(('case-sensitive: %s\n') % casesensitive)
965
965
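# Illustrative sketch only (not part of the original file): the filesystem
# probes used by debugfsinfo above, assuming `path` names a local directory.
# Each helper returns a best-effort answer for the filesystem containing it.
def _fsinfoexample(path):
    return {
        'fstype': util.getfstype(path),
        'exec': util.checkexec(path),
        'symlink': util.checklink(path),
        'hardlink': util.checknlink(path),
    }
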
966 @command('debuggetbundle',
966 @command('debuggetbundle',
967 [('H', 'head', [], _('id of head node'), _('ID')),
967 [('H', 'head', [], _('id of head node'), _('ID')),
968 ('C', 'common', [], _('id of common node'), _('ID')),
968 ('C', 'common', [], _('id of common node'), _('ID')),
969 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
969 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
970 _('REPO FILE [-H|-C ID]...'),
970 _('REPO FILE [-H|-C ID]...'),
971 norepo=True)
971 norepo=True)
972 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
972 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
973 """retrieves a bundle from a repo
973 """retrieves a bundle from a repo
974
974
975 Every ID must be a full-length hex node id string. Saves the bundle to the
975 Every ID must be a full-length hex node id string. Saves the bundle to the
976 given file.
976 given file.
977 """
977 """
978 opts = pycompat.byteskwargs(opts)
978 opts = pycompat.byteskwargs(opts)
979 repo = hg.peer(ui, opts, repopath)
979 repo = hg.peer(ui, opts, repopath)
980 if not repo.capable('getbundle'):
980 if not repo.capable('getbundle'):
981 raise error.Abort("getbundle() not supported by target repository")
981 raise error.Abort("getbundle() not supported by target repository")
982 args = {}
982 args = {}
983 if common:
983 if common:
984 args[r'common'] = [bin(s) for s in common]
984 args[r'common'] = [bin(s) for s in common]
985 if head:
985 if head:
986 args[r'heads'] = [bin(s) for s in head]
986 args[r'heads'] = [bin(s) for s in head]
987 # TODO: get desired bundlecaps from command line.
987 # TODO: get desired bundlecaps from command line.
988 args[r'bundlecaps'] = None
988 args[r'bundlecaps'] = None
989 bundle = repo.getbundle('debug', **args)
989 bundle = repo.getbundle('debug', **args)
990
990
991 bundletype = opts.get('type', 'bzip2').lower()
991 bundletype = opts.get('type', 'bzip2').lower()
992 btypes = {'none': 'HG10UN',
992 btypes = {'none': 'HG10UN',
993 'bzip2': 'HG10BZ',
993 'bzip2': 'HG10BZ',
994 'gzip': 'HG10GZ',
994 'gzip': 'HG10GZ',
995 'bundle2': 'HG20'}
995 'bundle2': 'HG20'}
996 bundletype = btypes.get(bundletype)
996 bundletype = btypes.get(bundletype)
997 if bundletype not in bundle2.bundletypes:
997 if bundletype not in bundle2.bundletypes:
998 raise error.Abort(_('unknown bundle type specified with --type'))
998 raise error.Abort(_('unknown bundle type specified with --type'))
999 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
999 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1000
1000
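# Illustrative sketch only (not part of the original file): the peer calls
# behind debuggetbundle above, assuming `repopath` points at a peer repository
# and `heads` holds full-length hex node ids. Writes a bundle2 ('HG20') file
# to `bundlepath`. Defined for illustration, never called.
def _getbundleexample(ui, repopath, bundlepath, heads):
    peer = hg.peer(ui, {}, repopath)
    if not peer.capable('getbundle'):
        raise error.Abort("getbundle() not supported by target repository")
    bundle = peer.getbundle('debug', heads=[bin(s) for s in heads],
                            bundlecaps=None)
    bundle2.writebundle(ui, bundle, bundlepath, 'HG20')
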
1001 @command('debugignore', [], '[FILE]')
1001 @command('debugignore', [], '[FILE]')
1002 def debugignore(ui, repo, *files, **opts):
1002 def debugignore(ui, repo, *files, **opts):
1003 """display the combined ignore pattern and information about ignored files
1003 """display the combined ignore pattern and information about ignored files
1004
1004
1005 With no argument, displays the combined ignore pattern.
1006 
1007 Given space-separated file names, shows whether each given file is ignored
1008 and, if so, shows the ignore rule (file and line number) that matched it.
1009 """
1009 """
1010 ignore = repo.dirstate._ignore
1010 ignore = repo.dirstate._ignore
1011 if not files:
1011 if not files:
1012 # Show all the patterns
1012 # Show all the patterns
1013 ui.write("%s\n" % pycompat.byterepr(ignore))
1013 ui.write("%s\n" % pycompat.byterepr(ignore))
1014 else:
1014 else:
1015 m = scmutil.match(repo[None], pats=files)
1015 m = scmutil.match(repo[None], pats=files)
1016 for f in m.files():
1016 for f in m.files():
1017 nf = util.normpath(f)
1017 nf = util.normpath(f)
1018 ignored = None
1018 ignored = None
1019 ignoredata = None
1019 ignoredata = None
1020 if nf != '.':
1020 if nf != '.':
1021 if ignore(nf):
1021 if ignore(nf):
1022 ignored = nf
1022 ignored = nf
1023 ignoredata = repo.dirstate._ignorefileandline(nf)
1023 ignoredata = repo.dirstate._ignorefileandline(nf)
1024 else:
1024 else:
1025 for p in util.finddirs(nf):
1025 for p in util.finddirs(nf):
1026 if ignore(p):
1026 if ignore(p):
1027 ignored = p
1027 ignored = p
1028 ignoredata = repo.dirstate._ignorefileandline(p)
1028 ignoredata = repo.dirstate._ignorefileandline(p)
1029 break
1029 break
1030 if ignored:
1030 if ignored:
1031 if ignored == nf:
1031 if ignored == nf:
1032 ui.write(_("%s is ignored\n") % m.uipath(f))
1032 ui.write(_("%s is ignored\n") % m.uipath(f))
1033 else:
1033 else:
1034 ui.write(_("%s is ignored because of "
1034 ui.write(_("%s is ignored because of "
1035 "containing folder %s\n")
1035 "containing folder %s\n")
1036 % (m.uipath(f), ignored))
1036 % (m.uipath(f), ignored))
1037 ignorefile, lineno, line = ignoredata
1037 ignorefile, lineno, line = ignoredata
1038 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1038 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1039 % (ignorefile, lineno, line))
1039 % (ignorefile, lineno, line))
1040 else:
1040 else:
1041 ui.write(_("%s is not ignored\n") % m.uipath(f))
1041 ui.write(_("%s is not ignored\n") % m.uipath(f))
1042
1042
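# Illustrative sketch only (not part of the original file): the lookup
# debugignore above performs for a single normalized, repo-relative path `nf`.
# Returns (ignore file, line number, pattern) for the matching rule, or None
# when neither the path nor any containing directory is ignored.
def _ignoreexample(repo, nf):
    ignore = repo.dirstate._ignore
    if ignore(nf):
        return repo.dirstate._ignorefileandline(nf)
    for p in util.finddirs(nf):
        if ignore(p):
            return repo.dirstate._ignorefileandline(p)
    return None
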
1043 @command('debugindex', cmdutil.debugrevlogopts +
1043 @command('debugindex', cmdutil.debugrevlogopts +
1044 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
1044 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
1045 _('[-f FORMAT] -c|-m|FILE'),
1045 _('[-f FORMAT] -c|-m|FILE'),
1046 optionalrepo=True)
1046 optionalrepo=True)
1047 def debugindex(ui, repo, file_=None, **opts):
1047 def debugindex(ui, repo, file_=None, **opts):
1048 """dump the contents of an index file"""
1048 """dump the contents of an index file"""
1049 opts = pycompat.byteskwargs(opts)
1049 opts = pycompat.byteskwargs(opts)
1050 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
1050 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
1051 format = opts.get('format', 0)
1051 format = opts.get('format', 0)
1052 if format not in (0, 1):
1052 if format not in (0, 1):
1053 raise error.Abort(_("unknown format %d") % format)
1053 raise error.Abort(_("unknown format %d") % format)
1054
1054
1055 generaldelta = r.version & revlog.FLAG_GENERALDELTA
1055 generaldelta = r.version & revlog.FLAG_GENERALDELTA
1056 if generaldelta:
1056 if generaldelta:
1057 basehdr = ' delta'
1057 basehdr = ' delta'
1058 else:
1058 else:
1059 basehdr = ' base'
1059 basehdr = ' base'
1060
1060
1061 if ui.debugflag:
1061 if ui.debugflag:
1062 shortfn = hex
1062 shortfn = hex
1063 else:
1063 else:
1064 shortfn = short
1064 shortfn = short
1065
1065
1066 # There might not be anything in r, so have a sane default
1066 # There might not be anything in r, so have a sane default
1067 idlen = 12
1067 idlen = 12
1068 for i in r:
1068 for i in r:
1069 idlen = len(shortfn(r.node(i)))
1069 idlen = len(shortfn(r.node(i)))
1070 break
1070 break
1071
1071
1072 if format == 0:
1072 if format == 0:
1073 ui.write((" rev offset length " + basehdr + " linkrev"
1073 ui.write((" rev offset length " + basehdr + " linkrev"
1074 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
1074 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
1075 elif format == 1:
1075 elif format == 1:
1076 ui.write((" rev flag offset length"
1076 ui.write((" rev flag offset length"
1077 " size " + basehdr + " link p1 p2"
1077 " size " + basehdr + " link p1 p2"
1078 " %s\n") % "nodeid".rjust(idlen))
1078 " %s\n") % "nodeid".rjust(idlen))
1079
1079
1080 for i in r:
1080 for i in r:
1081 node = r.node(i)
1081 node = r.node(i)
1082 if generaldelta:
1082 if generaldelta:
1083 base = r.deltaparent(i)
1083 base = r.deltaparent(i)
1084 else:
1084 else:
1085 base = r.chainbase(i)
1085 base = r.chainbase(i)
1086 if format == 0:
1086 if format == 0:
1087 try:
1087 try:
1088 pp = r.parents(node)
1088 pp = r.parents(node)
1089 except Exception:
1089 except Exception:
1090 pp = [nullid, nullid]
1090 pp = [nullid, nullid]
1091 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1091 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1092 i, r.start(i), r.length(i), base, r.linkrev(i),
1092 i, r.start(i), r.length(i), base, r.linkrev(i),
1093 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
1093 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
1094 elif format == 1:
1094 elif format == 1:
1095 pr = r.parentrevs(i)
1095 pr = r.parentrevs(i)
1096 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
1096 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
1097 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
1097 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
1098 base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
1098 base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
1099
1099
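# Illustrative sketch only (not part of the original file): the per-revision
# fields printed by debugindex above, assuming `r` is an open revlog (for
# instance obtained from cmdutil.openrevlog).
def _indexrowsexample(r):
    for i in r:
        yield (i, r.start(i), r.length(i), r.linkrev(i), hex(r.node(i)))
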
1100 @command('debugindexdot', cmdutil.debugrevlogopts,
1100 @command('debugindexdot', cmdutil.debugrevlogopts,
1101 _('-c|-m|FILE'), optionalrepo=True)
1101 _('-c|-m|FILE'), optionalrepo=True)
1102 def debugindexdot(ui, repo, file_=None, **opts):
1102 def debugindexdot(ui, repo, file_=None, **opts):
1103 """dump an index DAG as a graphviz dot file"""
1103 """dump an index DAG as a graphviz dot file"""
1104 opts = pycompat.byteskwargs(opts)
1104 opts = pycompat.byteskwargs(opts)
1105 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
1105 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
1106 ui.write(("digraph G {\n"))
1106 ui.write(("digraph G {\n"))
1107 for i in r:
1107 for i in r:
1108 node = r.node(i)
1108 node = r.node(i)
1109 pp = r.parents(node)
1109 pp = r.parents(node)
1110 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1110 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1111 if pp[1] != nullid:
1111 if pp[1] != nullid:
1112 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1112 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1113 ui.write("}\n")
1113 ui.write("}\n")
1114
1114
1115 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1115 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1116 def debuginstall(ui, **opts):
1116 def debuginstall(ui, **opts):
1117 '''test Mercurial installation
1117 '''test Mercurial installation
1118
1118
1119 Returns 0 on success.
1119 Returns 0 on success.
1120 '''
1120 '''
1121 opts = pycompat.byteskwargs(opts)
1121 opts = pycompat.byteskwargs(opts)
1122
1122
1123 def writetemp(contents):
1123 def writetemp(contents):
1124 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
1124 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
1125 f = os.fdopen(fd, r"wb")
1125 f = os.fdopen(fd, r"wb")
1126 f.write(contents)
1126 f.write(contents)
1127 f.close()
1127 f.close()
1128 return name
1128 return name
1129
1129
1130 problems = 0
1130 problems = 0
1131
1131
1132 fm = ui.formatter('debuginstall', opts)
1132 fm = ui.formatter('debuginstall', opts)
1133 fm.startitem()
1133 fm.startitem()
1134
1134
1135 # encoding
1135 # encoding
1136 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1136 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1137 err = None
1137 err = None
1138 try:
1138 try:
1139 codecs.lookup(pycompat.sysstr(encoding.encoding))
1139 codecs.lookup(pycompat.sysstr(encoding.encoding))
1140 except LookupError as inst:
1140 except LookupError as inst:
1141 err = util.forcebytestr(inst)
1141 err = util.forcebytestr(inst)
1142 problems += 1
1142 problems += 1
1143 fm.condwrite(err, 'encodingerror', _(" %s\n"
1143 fm.condwrite(err, 'encodingerror', _(" %s\n"
1144 " (check that your locale is properly set)\n"), err)
1144 " (check that your locale is properly set)\n"), err)
1145
1145
1146 # Python
1146 # Python
1147 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1147 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1148 pycompat.sysexecutable)
1148 pycompat.sysexecutable)
1149 fm.write('pythonver', _("checking Python version (%s)\n"),
1149 fm.write('pythonver', _("checking Python version (%s)\n"),
1150 ("%d.%d.%d" % sys.version_info[:3]))
1150 ("%d.%d.%d" % sys.version_info[:3]))
1151 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1151 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1152 os.path.dirname(pycompat.fsencode(os.__file__)))
1152 os.path.dirname(pycompat.fsencode(os.__file__)))
1153
1153
1154 security = set(sslutil.supportedprotocols)
1154 security = set(sslutil.supportedprotocols)
1155 if sslutil.hassni:
1155 if sslutil.hassni:
1156 security.add('sni')
1156 security.add('sni')
1157
1157
1158 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1158 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1159 fm.formatlist(sorted(security), name='protocol',
1159 fm.formatlist(sorted(security), name='protocol',
1160 fmt='%s', sep=','))
1160 fmt='%s', sep=','))
1161
1161
1162 # These are warnings, not errors. So don't increment problem count. This
1162 # These are warnings, not errors. So don't increment problem count. This
1163 # may change in the future.
1163 # may change in the future.
1164 if 'tls1.2' not in security:
1164 if 'tls1.2' not in security:
1165 fm.plain(_(' TLS 1.2 not supported by Python install; '
1165 fm.plain(_(' TLS 1.2 not supported by Python install; '
1166 'network connections lack modern security\n'))
1166 'network connections lack modern security\n'))
1167 if 'sni' not in security:
1167 if 'sni' not in security:
1168 fm.plain(_(' SNI not supported by Python install; may have '
1168 fm.plain(_(' SNI not supported by Python install; may have '
1169 'connectivity issues with some servers\n'))
1169 'connectivity issues with some servers\n'))
1170
1170
1171 # TODO print CA cert info
1171 # TODO print CA cert info
1172
1172
1173 # hg version
1173 # hg version
1174 hgver = util.version()
1174 hgver = util.version()
1175 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1175 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1176 hgver.split('+')[0])
1176 hgver.split('+')[0])
1177 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1177 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1178 '+'.join(hgver.split('+')[1:]))
1178 '+'.join(hgver.split('+')[1:]))
1179
1179
1180 # compiled modules
1180 # compiled modules
1181 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1181 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1182 policy.policy)
1182 policy.policy)
1183 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1183 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1184 os.path.dirname(pycompat.fsencode(__file__)))
1184 os.path.dirname(pycompat.fsencode(__file__)))
1185
1185
1186 if policy.policy in ('c', 'allow'):
1186 if policy.policy in ('c', 'allow'):
1187 err = None
1187 err = None
1188 try:
1188 try:
1189 from .cext import (
1189 from .cext import (
1190 base85,
1190 base85,
1191 bdiff,
1191 bdiff,
1192 mpatch,
1192 mpatch,
1193 osutil,
1193 osutil,
1194 )
1194 )
1195 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1195 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1196 except Exception as inst:
1196 except Exception as inst:
1197 err = util.forcebytestr(inst)
1197 err = util.forcebytestr(inst)
1198 problems += 1
1198 problems += 1
1199 fm.condwrite(err, 'extensionserror', " %s\n", err)
1199 fm.condwrite(err, 'extensionserror', " %s\n", err)
1200
1200
1201 compengines = util.compengines._engines.values()
1201 compengines = util.compengines._engines.values()
1202 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1202 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1203 fm.formatlist(sorted(e.name() for e in compengines),
1203 fm.formatlist(sorted(e.name() for e in compengines),
1204 name='compengine', fmt='%s', sep=', '))
1204 name='compengine', fmt='%s', sep=', '))
1205 fm.write('compenginesavail', _('checking available compression engines '
1205 fm.write('compenginesavail', _('checking available compression engines '
1206 '(%s)\n'),
1206 '(%s)\n'),
1207 fm.formatlist(sorted(e.name() for e in compengines
1207 fm.formatlist(sorted(e.name() for e in compengines
1208 if e.available()),
1208 if e.available()),
1209 name='compengine', fmt='%s', sep=', '))
1209 name='compengine', fmt='%s', sep=', '))
1210 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1210 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1211 fm.write('compenginesserver', _('checking available compression engines '
1211 fm.write('compenginesserver', _('checking available compression engines '
1212 'for wire protocol (%s)\n'),
1212 'for wire protocol (%s)\n'),
1213 fm.formatlist([e.name() for e in wirecompengines
1213 fm.formatlist([e.name() for e in wirecompengines
1214 if e.wireprotosupport()],
1214 if e.wireprotosupport()],
1215 name='compengine', fmt='%s', sep=', '))
1215 name='compengine', fmt='%s', sep=', '))
1216 re2 = 'missing'
1216 re2 = 'missing'
1217 if util._re2:
1217 if util._re2:
1218 re2 = 'available'
1218 re2 = 'available'
1219 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1219 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1220 fm.data(re2=bool(util._re2))
1220 fm.data(re2=bool(util._re2))
1221
1221
1222 # templates
1222 # templates
1223 p = templater.templatepaths()
1223 p = templater.templatepaths()
1224 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1224 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1225 fm.condwrite(not p, '', _(" no template directories found\n"))
1225 fm.condwrite(not p, '', _(" no template directories found\n"))
1226 if p:
1226 if p:
1227 m = templater.templatepath("map-cmdline.default")
1227 m = templater.templatepath("map-cmdline.default")
1228 if m:
1228 if m:
1229 # template found, check if it is working
1229 # template found, check if it is working
1230 err = None
1230 err = None
1231 try:
1231 try:
1232 templater.templater.frommapfile(m)
1232 templater.templater.frommapfile(m)
1233 except Exception as inst:
1233 except Exception as inst:
1234 err = util.forcebytestr(inst)
1234 err = util.forcebytestr(inst)
1235 p = None
1235 p = None
1236 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1236 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1237 else:
1237 else:
1238 p = None
1238 p = None
1239 fm.condwrite(p, 'defaulttemplate',
1239 fm.condwrite(p, 'defaulttemplate',
1240 _("checking default template (%s)\n"), m)
1240 _("checking default template (%s)\n"), m)
1241 fm.condwrite(not m, 'defaulttemplatenotfound',
1241 fm.condwrite(not m, 'defaulttemplatenotfound',
1242 _(" template '%s' not found\n"), "default")
1242 _(" template '%s' not found\n"), "default")
1243 if not p:
1243 if not p:
1244 problems += 1
1244 problems += 1
1245 fm.condwrite(not p, '',
1245 fm.condwrite(not p, '',
1246 _(" (templates seem to have been installed incorrectly)\n"))
1246 _(" (templates seem to have been installed incorrectly)\n"))
1247
1247
1248 # editor
1248 # editor
1249 editor = ui.geteditor()
1249 editor = ui.geteditor()
1250 editor = util.expandpath(editor)
1250 editor = util.expandpath(editor)
1251 editorbin = util.shellsplit(editor)[0]
1251 editorbin = util.shellsplit(editor)[0]
1252 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1252 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1253 cmdpath = util.findexe(editorbin)
1253 cmdpath = util.findexe(editorbin)
1254 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1254 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1255 _(" No commit editor set and can't find %s in PATH\n"
1255 _(" No commit editor set and can't find %s in PATH\n"
1256 " (specify a commit editor in your configuration"
1256 " (specify a commit editor in your configuration"
1257 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1257 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1258 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1258 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1259 _(" Can't find editor '%s' in PATH\n"
1259 _(" Can't find editor '%s' in PATH\n"
1260 " (specify a commit editor in your configuration"
1260 " (specify a commit editor in your configuration"
1261 " file)\n"), not cmdpath and editorbin)
1261 " file)\n"), not cmdpath and editorbin)
1262 if not cmdpath and editor != 'vi':
1262 if not cmdpath and editor != 'vi':
1263 problems += 1
1263 problems += 1
1264
1264
1265 # check username
1265 # check username
1266 username = None
1266 username = None
1267 err = None
1267 err = None
1268 try:
1268 try:
1269 username = ui.username()
1269 username = ui.username()
1270 except error.Abort as e:
1270 except error.Abort as e:
1271 err = util.forcebytestr(e)
1271 err = util.forcebytestr(e)
1272 problems += 1
1272 problems += 1
1273
1273
1274 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1274 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1275 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1275 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1276 " (specify a username in your configuration file)\n"), err)
1276 " (specify a username in your configuration file)\n"), err)
1277
1277
1278 fm.condwrite(not problems, '',
1278 fm.condwrite(not problems, '',
1279 _("no problems detected\n"))
1279 _("no problems detected\n"))
1280 if not problems:
1280 if not problems:
1281 fm.data(problems=problems)
1281 fm.data(problems=problems)
1282 fm.condwrite(problems, 'problems',
1282 fm.condwrite(problems, 'problems',
1283 _("%d problems detected,"
1283 _("%d problems detected,"
1284 " please check your install!\n"), problems)
1284 " please check your install!\n"), problems)
1285 fm.end()
1285 fm.end()
1286
1286
1287 return problems
1287 return problems
1288
1288
1289 @command('debugknown', [], _('REPO ID...'), norepo=True)
1289 @command('debugknown', [], _('REPO ID...'), norepo=True)
1290 def debugknown(ui, repopath, *ids, **opts):
1290 def debugknown(ui, repopath, *ids, **opts):
1291 """test whether node ids are known to a repo
1291 """test whether node ids are known to a repo
1292
1292
1293 Every ID must be a full-length hex node id string. Returns a list of 0s
1293 Every ID must be a full-length hex node id string. Returns a list of 0s
1294 and 1s indicating unknown/known.
1294 and 1s indicating unknown/known.
1295 """
1295 """
1296 opts = pycompat.byteskwargs(opts)
1296 opts = pycompat.byteskwargs(opts)
1297 repo = hg.peer(ui, opts, repopath)
1297 repo = hg.peer(ui, opts, repopath)
1298 if not repo.capable('known'):
1298 if not repo.capable('known'):
1299 raise error.Abort("known() not supported by target repository")
1299 raise error.Abort("known() not supported by target repository")
1300 flags = repo.known([bin(s) for s in ids])
1300 flags = repo.known([bin(s) for s in ids])
1301 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1301 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1302
1302
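# Illustrative sketch only (not part of the original file): the wire call
# behind debugknown above, assuming `repopath` points at a peer and `ids`
# holds full-length hex node ids. Returns one boolean per queried node, in
# input order.
def _knownexample(ui, repopath, ids):
    peer = hg.peer(ui, {}, repopath)
    if not peer.capable('known'):
        raise error.Abort("known() not supported by target repository")
    return peer.known([bin(s) for s in ids])
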
1303 @command('debuglabelcomplete', [], _('LABEL...'))
1304 def debuglabelcomplete(ui, repo, *args):
1305 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1306 debugnamecomplete(ui, repo, *args)
1307
1307
1308 @command('debuglocks',
1308 @command('debuglocks',
1309 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1309 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1310 ('W', 'force-wlock', None,
1310 ('W', 'force-wlock', None,
1311 _('free the working state lock (DANGEROUS)')),
1311 _('free the working state lock (DANGEROUS)')),
1312 ('s', 'set-lock', None, _('set the store lock until stopped')),
1312 ('s', 'set-lock', None, _('set the store lock until stopped')),
1313 ('S', 'set-wlock', None,
1313 ('S', 'set-wlock', None,
1314 _('set the working state lock until stopped'))],
1314 _('set the working state lock until stopped'))],
1315 _('[OPTION]...'))
1315 _('[OPTION]...'))
1316 def debuglocks(ui, repo, **opts):
1316 def debuglocks(ui, repo, **opts):
1317 """show or modify state of locks
1317 """show or modify state of locks
1318
1318
1319 By default, this command will show which locks are held. This
1319 By default, this command will show which locks are held. This
1320 includes the user and process holding the lock, the amount of time
1320 includes the user and process holding the lock, the amount of time
1321 the lock has been held, and the machine name where the process is
1321 the lock has been held, and the machine name where the process is
1322 running if it's not local.
1322 running if it's not local.
1323
1323
1324 Locks protect the integrity of Mercurial's data, so should be
1324 Locks protect the integrity of Mercurial's data, so should be
1325 treated with care. System crashes or other interruptions may cause
1325 treated with care. System crashes or other interruptions may cause
1326 locks to not be properly released, though Mercurial will usually
1326 locks to not be properly released, though Mercurial will usually
1327 detect and remove such stale locks automatically.
1327 detect and remove such stale locks automatically.
1328
1328
1329 However, detecting stale locks may not always be possible (for
1329 However, detecting stale locks may not always be possible (for
1330 instance, on a shared filesystem). Removing locks may also be
1330 instance, on a shared filesystem). Removing locks may also be
1331 blocked by filesystem permissions.
1331 blocked by filesystem permissions.
1332
1332
1333 Setting a lock will prevent other commands from changing the data.
1333 Setting a lock will prevent other commands from changing the data.
1334 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1334 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1335 The set locks are removed when the command exits.
1335 The set locks are removed when the command exits.
1336
1336
1337 Returns 0 if no locks are held.
1337 Returns 0 if no locks are held.
1338
1338
1339 """
1339 """
1340
1340
1341 if opts.get(r'force_lock'):
1341 if opts.get(r'force_lock'):
1342 repo.svfs.unlink('lock')
1342 repo.svfs.unlink('lock')
1343 if opts.get(r'force_wlock'):
1343 if opts.get(r'force_wlock'):
1344 repo.vfs.unlink('wlock')
1344 repo.vfs.unlink('wlock')
1345 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1345 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1346 return 0
1346 return 0
1347
1347
1348 locks = []
1348 locks = []
1349 try:
1349 try:
1350 if opts.get(r'set_wlock'):
1350 if opts.get(r'set_wlock'):
1351 try:
1351 try:
1352 locks.append(repo.wlock(False))
1352 locks.append(repo.wlock(False))
1353 except error.LockHeld:
1353 except error.LockHeld:
1354 raise error.Abort(_('wlock is already held'))
1354 raise error.Abort(_('wlock is already held'))
1355 if opts.get(r'set_lock'):
1355 if opts.get(r'set_lock'):
1356 try:
1356 try:
1357 locks.append(repo.lock(False))
1357 locks.append(repo.lock(False))
1358 except error.LockHeld:
1358 except error.LockHeld:
1359 raise error.Abort(_('lock is already held'))
1359 raise error.Abort(_('lock is already held'))
1360 if len(locks):
1360 if len(locks):
1361 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1361 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1362 return 0
1362 return 0
1363 finally:
1363 finally:
1364 release(*locks)
1364 release(*locks)
1365
1365
1366 now = time.time()
1366 now = time.time()
1367 held = 0
1367 held = 0
1368
1368
1369 def report(vfs, name, method):
1369 def report(vfs, name, method):
1370 # this causes stale locks to get reaped for more accurate reporting
1370 # this causes stale locks to get reaped for more accurate reporting
1371 try:
1371 try:
1372 l = method(False)
1372 l = method(False)
1373 except error.LockHeld:
1373 except error.LockHeld:
1374 l = None
1374 l = None
1375
1375
1376 if l:
1376 if l:
1377 l.release()
1377 l.release()
1378 else:
1378 else:
1379 try:
1379 try:
1380 st = vfs.lstat(name)
1380 st = vfs.lstat(name)
1381 age = now - st[stat.ST_MTIME]
1381 age = now - st[stat.ST_MTIME]
1382 user = util.username(st.st_uid)
1382 user = util.username(st.st_uid)
1383 locker = vfs.readlock(name)
1383 locker = vfs.readlock(name)
1384 if ":" in locker:
1384 if ":" in locker:
1385 host, pid = locker.split(':')
1385 host, pid = locker.split(':')
1386 if host == socket.gethostname():
1386 if host == socket.gethostname():
1387 locker = 'user %s, process %s' % (user, pid)
1387 locker = 'user %s, process %s' % (user, pid)
1388 else:
1388 else:
1389 locker = 'user %s, process %s, host %s' \
1389 locker = 'user %s, process %s, host %s' \
1390 % (user, pid, host)
1390 % (user, pid, host)
1391 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1391 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1392 return 1
1392 return 1
1393 except OSError as e:
1393 except OSError as e:
1394 if e.errno != errno.ENOENT:
1394 if e.errno != errno.ENOENT:
1395 raise
1395 raise
1396
1396
1397 ui.write(("%-6s free\n") % (name + ":"))
1397 ui.write(("%-6s free\n") % (name + ":"))
1398 return 0
1398 return 0
1399
1399
1400 held += report(repo.svfs, "lock", repo.lock)
1400 held += report(repo.svfs, "lock", repo.lock)
1401 held += report(repo.vfs, "wlock", repo.wlock)
1401 held += report(repo.vfs, "wlock", repo.wlock)
1402
1402
1403 return held
1403 return held
1404
1404
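# Illustrative sketch only (not part of the original file): the non-blocking
# probe debuglocks above uses to tell whether the store lock is currently
# held by another process.
def _lockheldexample(repo):
    try:
        l = repo.lock(False)   # wait=False: raise LockHeld instead of blocking
    except error.LockHeld:
        return True
    l.release()
    return False
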
1405 @command('debugmergestate', [], '')
1405 @command('debugmergestate', [], '')
1406 def debugmergestate(ui, repo, *args):
1406 def debugmergestate(ui, repo, *args):
1407 """print merge state
1407 """print merge state
1408
1408
1409 Use --verbose to print out information about whether v1 or v2 merge state
1409 Use --verbose to print out information about whether v1 or v2 merge state
1410 was chosen."""
1410 was chosen."""
1411 def _hashornull(h):
1411 def _hashornull(h):
1412 if h == nullhex:
1412 if h == nullhex:
1413 return 'null'
1413 return 'null'
1414 else:
1414 else:
1415 return h
1415 return h
1416
1416
1417 def printrecords(version):
1417 def printrecords(version):
1418 ui.write(('* version %d records\n') % version)
1418 ui.write(('* version %d records\n') % version)
1419 if version == 1:
1419 if version == 1:
1420 records = v1records
1420 records = v1records
1421 else:
1421 else:
1422 records = v2records
1422 records = v2records
1423
1423
1424 for rtype, record in records:
1424 for rtype, record in records:
1425 # pretty print some record types
1425 # pretty print some record types
1426 if rtype == 'L':
1426 if rtype == 'L':
1427 ui.write(('local: %s\n') % record)
1427 ui.write(('local: %s\n') % record)
1428 elif rtype == 'O':
1428 elif rtype == 'O':
1429 ui.write(('other: %s\n') % record)
1429 ui.write(('other: %s\n') % record)
1430 elif rtype == 'm':
1430 elif rtype == 'm':
1431 driver, mdstate = record.split('\0', 1)
1431 driver, mdstate = record.split('\0', 1)
1432 ui.write(('merge driver: %s (state "%s")\n')
1432 ui.write(('merge driver: %s (state "%s")\n')
1433 % (driver, mdstate))
1433 % (driver, mdstate))
1434 elif rtype in 'FDC':
1434 elif rtype in 'FDC':
1435 r = record.split('\0')
1435 r = record.split('\0')
1436 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1436 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1437 if version == 1:
1437 if version == 1:
1438 onode = 'not stored in v1 format'
1438 onode = 'not stored in v1 format'
1439 flags = r[7]
1439 flags = r[7]
1440 else:
1440 else:
1441 onode, flags = r[7:9]
1441 onode, flags = r[7:9]
1442 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1442 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1443 % (f, rtype, state, _hashornull(hash)))
1443 % (f, rtype, state, _hashornull(hash)))
1444 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1444 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1445 ui.write((' ancestor path: %s (node %s)\n')
1445 ui.write((' ancestor path: %s (node %s)\n')
1446 % (afile, _hashornull(anode)))
1446 % (afile, _hashornull(anode)))
1447 ui.write((' other path: %s (node %s)\n')
1447 ui.write((' other path: %s (node %s)\n')
1448 % (ofile, _hashornull(onode)))
1448 % (ofile, _hashornull(onode)))
1449 elif rtype == 'f':
1449 elif rtype == 'f':
1450 filename, rawextras = record.split('\0', 1)
1450 filename, rawextras = record.split('\0', 1)
1451 extras = rawextras.split('\0')
1451 extras = rawextras.split('\0')
1452 i = 0
1452 i = 0
1453 extrastrings = []
1453 extrastrings = []
1454 while i < len(extras):
1454 while i < len(extras):
1455 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1455 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1456 i += 2
1456 i += 2
1457
1457
1458 ui.write(('file extras: %s (%s)\n')
1458 ui.write(('file extras: %s (%s)\n')
1459 % (filename, ', '.join(extrastrings)))
1459 % (filename, ', '.join(extrastrings)))
1460 elif rtype == 'l':
1460 elif rtype == 'l':
1461 labels = record.split('\0', 2)
1461 labels = record.split('\0', 2)
1462 labels = [l for l in labels if len(l) > 0]
1462 labels = [l for l in labels if len(l) > 0]
1463 ui.write(('labels:\n'))
1463 ui.write(('labels:\n'))
1464 ui.write((' local: %s\n' % labels[0]))
1464 ui.write((' local: %s\n' % labels[0]))
1465 ui.write((' other: %s\n' % labels[1]))
1465 ui.write((' other: %s\n' % labels[1]))
1466 if len(labels) > 2:
1466 if len(labels) > 2:
1467 ui.write((' base: %s\n' % labels[2]))
1467 ui.write((' base: %s\n' % labels[2]))
1468 else:
1468 else:
1469 ui.write(('unrecognized entry: %s\t%s\n')
1469 ui.write(('unrecognized entry: %s\t%s\n')
1470 % (rtype, record.replace('\0', '\t')))
1470 % (rtype, record.replace('\0', '\t')))
1471
1471
1472 # Avoid mergestate.read() since it may raise an exception for unsupported
1472 # Avoid mergestate.read() since it may raise an exception for unsupported
1473 # merge state records. We shouldn't be doing this, but this is OK since this
1473 # merge state records. We shouldn't be doing this, but this is OK since this
1474 # command is pretty low-level.
1474 # command is pretty low-level.
1475 ms = mergemod.mergestate(repo)
1475 ms = mergemod.mergestate(repo)
1476
1476
1477 # sort so that reasonable information is on top
1477 # sort so that reasonable information is on top
1478 v1records = ms._readrecordsv1()
1478 v1records = ms._readrecordsv1()
1479 v2records = ms._readrecordsv2()
1479 v2records = ms._readrecordsv2()
1480 order = 'LOml'
1480 order = 'LOml'
1481 def key(r):
1481 def key(r):
1482 idx = order.find(r[0])
1482 idx = order.find(r[0])
1483 if idx == -1:
1483 if idx == -1:
1484 return (1, r[1])
1484 return (1, r[1])
1485 else:
1485 else:
1486 return (0, idx)
1486 return (0, idx)
1487 v1records.sort(key=key)
1487 v1records.sort(key=key)
1488 v2records.sort(key=key)
1488 v2records.sort(key=key)
1489
1489
1490 if not v1records and not v2records:
1490 if not v1records and not v2records:
1491 ui.write(('no merge state found\n'))
1491 ui.write(('no merge state found\n'))
1492 elif not v2records:
1492 elif not v2records:
1493 ui.note(('no version 2 merge state\n'))
1493 ui.note(('no version 2 merge state\n'))
1494 printrecords(1)
1494 printrecords(1)
1495 elif ms._v1v2match(v1records, v2records):
1495 elif ms._v1v2match(v1records, v2records):
1496 ui.note(('v1 and v2 states match: using v2\n'))
1496 ui.note(('v1 and v2 states match: using v2\n'))
1497 printrecords(2)
1497 printrecords(2)
1498 else:
1498 else:
1499 ui.note(('v1 and v2 states mismatch: using v1\n'))
1499 ui.note(('v1 and v2 states mismatch: using v1\n'))
1500 printrecords(1)
1500 printrecords(1)
1501 if ui.verbose:
1501 if ui.verbose:
1502 printrecords(2)
1502 printrecords(2)
1503
1503
1504 @command('debugnamecomplete', [], _('NAME...'))
1504 @command('debugnamecomplete', [], _('NAME...'))
1505 def debugnamecomplete(ui, repo, *args):
1505 def debugnamecomplete(ui, repo, *args):
1506 '''complete "names" - tags, open branch names, bookmark names'''
1506 '''complete "names" - tags, open branch names, bookmark names'''
1507
1507
1508 names = set()
1508 names = set()
1509 # since we previously only listed open branches, we will handle that
1509 # since we previously only listed open branches, we will handle that
1510 # specially (after this for loop)
1510 # specially (after this for loop)
1511 for name, ns in repo.names.iteritems():
1511 for name, ns in repo.names.iteritems():
1512 if name != 'branches':
1512 if name != 'branches':
1513 names.update(ns.listnames(repo))
1513 names.update(ns.listnames(repo))
1514 names.update(tag for (tag, heads, tip, closed)
1514 names.update(tag for (tag, heads, tip, closed)
1515 in repo.branchmap().iterbranches() if not closed)
1515 in repo.branchmap().iterbranches() if not closed)
1516 completions = set()
1516 completions = set()
1517 if not args:
1517 if not args:
1518 args = ['']
1518 args = ['']
1519 for a in args:
1519 for a in args:
1520 completions.update(n for n in names if n.startswith(a))
1520 completions.update(n for n in names if n.startswith(a))
1521 ui.write('\n'.join(sorted(completions)))
1521 ui.write('\n'.join(sorted(completions)))
1522 ui.write('\n')
1522 ui.write('\n')
1523
1523
1524 @command('debugobsolete',
1524 @command('debugobsolete',
1525 [('', 'flags', 0, _('markers flag')),
1525 [('', 'flags', 0, _('markers flag')),
1526 ('', 'record-parents', False,
1526 ('', 'record-parents', False,
1527 _('record parent information for the precursor')),
1527 _('record parent information for the precursor')),
1528 ('r', 'rev', [], _('display markers relevant to REV')),
1528 ('r', 'rev', [], _('display markers relevant to REV')),
1529 ('', 'exclusive', False, _('restrict display to markers only '
1529 ('', 'exclusive', False, _('restrict display to markers only '
1530 'relevant to REV')),
1530 'relevant to REV')),
1531 ('', 'index', False, _('display index of the marker')),
1531 ('', 'index', False, _('display index of the marker')),
1532 ('', 'delete', [], _('delete markers specified by indices')),
1532 ('', 'delete', [], _('delete markers specified by indices')),
1533 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1533 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1534 _('[OBSOLETED [REPLACEMENT ...]]'))
1534 _('[OBSOLETED [REPLACEMENT ...]]'))
1535 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1535 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1536 """create arbitrary obsolete marker
1536 """create arbitrary obsolete marker
1537
1537
1538 With no arguments, displays the list of obsolescence markers."""
1538 With no arguments, displays the list of obsolescence markers."""
1539
1539
1540 opts = pycompat.byteskwargs(opts)
1540 opts = pycompat.byteskwargs(opts)
1541
1541
1542 def parsenodeid(s):
1542 def parsenodeid(s):
1543 try:
1543 try:
1544 # We do not use revsingle/revrange functions here to accept
1544 # We do not use revsingle/revrange functions here to accept
1545 # arbitrary node identifiers, possibly not present in the
1545 # arbitrary node identifiers, possibly not present in the
1546 # local repository.
1546 # local repository.
1547 n = bin(s)
1547 n = bin(s)
1548 if len(n) != len(nullid):
1548 if len(n) != len(nullid):
1549 raise TypeError()
1549 raise TypeError()
1550 return n
1550 return n
1551 except TypeError:
1551 except TypeError:
1552 raise error.Abort('changeset references must be full hexadecimal '
1552 raise error.Abort('changeset references must be full hexadecimal '
1553 'node identifiers')
1553 'node identifiers')
1554
1554
1555 if opts.get('delete'):
1555 if opts.get('delete'):
1556 indices = []
1556 indices = []
1557 for v in opts.get('delete'):
1557 for v in opts.get('delete'):
1558 try:
1558 try:
1559 indices.append(int(v))
1559 indices.append(int(v))
1560 except ValueError:
1560 except ValueError:
1561 raise error.Abort(_('invalid index value: %r') % v,
1561 raise error.Abort(_('invalid index value: %r') % v,
1562 hint=_('use integers for indices'))
1562 hint=_('use integers for indices'))
1563
1563
1564 if repo.currenttransaction():
1564 if repo.currenttransaction():
1565 raise error.Abort(_('cannot delete obsmarkers in the middle '
1565 raise error.Abort(_('cannot delete obsmarkers in the middle '
1566 'of a transaction.'))
1567
1567
1568 with repo.lock():
1568 with repo.lock():
1569 n = repair.deleteobsmarkers(repo.obsstore, indices)
1569 n = repair.deleteobsmarkers(repo.obsstore, indices)
1570 ui.write(_('deleted %i obsolescence markers\n') % n)
1570 ui.write(_('deleted %i obsolescence markers\n') % n)
1571
1571
1572 return
1572 return
1573
1573
1574 if precursor is not None:
1574 if precursor is not None:
1575 if opts['rev']:
1575 if opts['rev']:
1576 raise error.Abort('cannot select revision when creating marker')
1576 raise error.Abort('cannot select revision when creating marker')
1577 metadata = {}
1577 metadata = {}
1578 metadata['user'] = opts['user'] or ui.username()
1578 metadata['user'] = opts['user'] or ui.username()
1579 succs = tuple(parsenodeid(succ) for succ in successors)
1579 succs = tuple(parsenodeid(succ) for succ in successors)
1580 l = repo.lock()
1580 l = repo.lock()
1581 try:
1581 try:
1582 tr = repo.transaction('debugobsolete')
1582 tr = repo.transaction('debugobsolete')
1583 try:
1583 try:
1584 date = opts.get('date')
1584 date = opts.get('date')
1585 if date:
1585 if date:
1586 date = dateutil.parsedate(date)
1586 date = dateutil.parsedate(date)
1587 else:
1587 else:
1588 date = None
1588 date = None
1589 prec = parsenodeid(precursor)
1589 prec = parsenodeid(precursor)
1590 parents = None
1590 parents = None
1591 if opts['record_parents']:
1591 if opts['record_parents']:
1592 if prec not in repo.unfiltered():
1592 if prec not in repo.unfiltered():
1593 raise error.Abort('cannot use --record-parents on '
1594 'unknown changesets')
1595 parents = repo.unfiltered()[prec].parents()
1595 parents = repo.unfiltered()[prec].parents()
1596 parents = tuple(p.node() for p in parents)
1596 parents = tuple(p.node() for p in parents)
1597 repo.obsstore.create(tr, prec, succs, opts['flags'],
1597 repo.obsstore.create(tr, prec, succs, opts['flags'],
1598 parents=parents, date=date,
1598 parents=parents, date=date,
1599 metadata=metadata, ui=ui)
1599 metadata=metadata, ui=ui)
1600 tr.close()
1600 tr.close()
1601 except ValueError as exc:
1601 except ValueError as exc:
1602 raise error.Abort(_('bad obsmarker input: %s') %
1602 raise error.Abort(_('bad obsmarker input: %s') %
1603 pycompat.bytestr(exc))
1603 pycompat.bytestr(exc))
1604 finally:
1604 finally:
1605 tr.release()
1605 tr.release()
1606 finally:
1606 finally:
1607 l.release()
1607 l.release()
1608 else:
1608 else:
1609 if opts['rev']:
1609 if opts['rev']:
1610 revs = scmutil.revrange(repo, opts['rev'])
1610 revs = scmutil.revrange(repo, opts['rev'])
1611 nodes = [repo[r].node() for r in revs]
1611 nodes = [repo[r].node() for r in revs]
1612 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1612 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1613 exclusive=opts['exclusive']))
1613 exclusive=opts['exclusive']))
1614 markers.sort(key=lambda x: x._data)
1614 markers.sort(key=lambda x: x._data)
1615 else:
1615 else:
1616 markers = obsutil.getmarkers(repo)
1616 markers = obsutil.getmarkers(repo)
1617
1617
1618 markerstoiter = markers
1618 markerstoiter = markers
1619 isrelevant = lambda m: True
1619 isrelevant = lambda m: True
1620 if opts.get('rev') and opts.get('index'):
1620 if opts.get('rev') and opts.get('index'):
1621 markerstoiter = obsutil.getmarkers(repo)
1621 markerstoiter = obsutil.getmarkers(repo)
1622 markerset = set(markers)
1622 markerset = set(markers)
1623 isrelevant = lambda m: m in markerset
1623 isrelevant = lambda m: m in markerset
1624
1624
1625 fm = ui.formatter('debugobsolete', opts)
1625 fm = ui.formatter('debugobsolete', opts)
1626 for i, m in enumerate(markerstoiter):
1626 for i, m in enumerate(markerstoiter):
1627 if not isrelevant(m):
1627 if not isrelevant(m):
1628 # marker can be irrelevant when we're iterating over a set
1628 # marker can be irrelevant when we're iterating over a set
1629 # of markers (markerstoiter) which is bigger than the set
1629 # of markers (markerstoiter) which is bigger than the set
1630 # of markers we want to display (markers)
1630 # of markers we want to display (markers)
1631 # this can happen if both --index and --rev options are
1631 # this can happen if both --index and --rev options are
1632 # provided and thus we need to iterate over all of the markers
1632 # provided and thus we need to iterate over all of the markers
1633 # to get the correct indices, but only display the ones that
1633 # to get the correct indices, but only display the ones that
1634 # are relevant to --rev value
1634 # are relevant to --rev value
1635 continue
1635 continue
1636 fm.startitem()
1636 fm.startitem()
1637 ind = i if opts.get('index') else None
1637 ind = i if opts.get('index') else None
1638 cmdutil.showmarker(fm, m, index=ind)
1638 cmdutil.showmarker(fm, m, index=ind)
1639 fm.end()
1639 fm.end()
1640
1640
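# Illustrative sketch only (not part of the original file): the read-only
# path of debugobsolete above, assuming `repo` is an open localrepo and
# `revs` an optional iterable of revision numbers to restrict the markers to.
def _markersexample(repo, revs=None):
    if revs:
        nodes = [repo[r].node() for r in revs]
        return list(obsutil.getmarkers(repo, nodes=nodes))
    return list(obsutil.getmarkers(repo))
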
1641 @command('debugpathcomplete',
1641 @command('debugpathcomplete',
1642 [('f', 'full', None, _('complete an entire path')),
1642 [('f', 'full', None, _('complete an entire path')),
1643 ('n', 'normal', None, _('show only normal files')),
1643 ('n', 'normal', None, _('show only normal files')),
1644 ('a', 'added', None, _('show only added files')),
1644 ('a', 'added', None, _('show only added files')),
1645 ('r', 'removed', None, _('show only removed files'))],
1645 ('r', 'removed', None, _('show only removed files'))],
1646 _('FILESPEC...'))
1646 _('FILESPEC...'))
1647 def debugpathcomplete(ui, repo, *specs, **opts):
1647 def debugpathcomplete(ui, repo, *specs, **opts):
1648 '''complete part or all of a tracked path
1648 '''complete part or all of a tracked path
1649
1649
1650 This command supports shells that offer path name completion. It
1650 This command supports shells that offer path name completion. It
1651 currently completes only files already known to the dirstate.
1651 currently completes only files already known to the dirstate.
1652
1652
1653 Completion extends only to the next path segment unless
1653 Completion extends only to the next path segment unless
1654 --full is specified, in which case entire paths are used.'''
1654 --full is specified, in which case entire paths are used.'''
1655
1655
1656 def complete(path, acceptable):
1656 def complete(path, acceptable):
1657 dirstate = repo.dirstate
1657 dirstate = repo.dirstate
1658 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1658 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1659 rootdir = repo.root + pycompat.ossep
1659 rootdir = repo.root + pycompat.ossep
1660 if spec != repo.root and not spec.startswith(rootdir):
1660 if spec != repo.root and not spec.startswith(rootdir):
1661 return [], []
1661 return [], []
1662 if os.path.isdir(spec):
1662 if os.path.isdir(spec):
1663 spec += '/'
1663 spec += '/'
1664 spec = spec[len(rootdir):]
1664 spec = spec[len(rootdir):]
1665 fixpaths = pycompat.ossep != '/'
1665 fixpaths = pycompat.ossep != '/'
1666 if fixpaths:
1666 if fixpaths:
1667 spec = spec.replace(pycompat.ossep, '/')
1667 spec = spec.replace(pycompat.ossep, '/')
1668 speclen = len(spec)
1668 speclen = len(spec)
1669 fullpaths = opts[r'full']
1669 fullpaths = opts[r'full']
1670 files, dirs = set(), set()
1670 files, dirs = set(), set()
1671 adddir, addfile = dirs.add, files.add
1671 adddir, addfile = dirs.add, files.add
1672 for f, st in dirstate.iteritems():
1672 for f, st in dirstate.iteritems():
1673 if f.startswith(spec) and st[0] in acceptable:
1673 if f.startswith(spec) and st[0] in acceptable:
1674 if fixpaths:
1674 if fixpaths:
1675 f = f.replace('/', pycompat.ossep)
1675 f = f.replace('/', pycompat.ossep)
1676 if fullpaths:
1676 if fullpaths:
1677 addfile(f)
1677 addfile(f)
1678 continue
1678 continue
1679 s = f.find(pycompat.ossep, speclen)
1679 s = f.find(pycompat.ossep, speclen)
1680 if s >= 0:
1680 if s >= 0:
1681 adddir(f[:s])
1681 adddir(f[:s])
1682 else:
1682 else:
1683 addfile(f)
1683 addfile(f)
1684 return files, dirs
1684 return files, dirs
1685
1685
1686 acceptable = ''
1686 acceptable = ''
1687 if opts[r'normal']:
1687 if opts[r'normal']:
1688 acceptable += 'nm'
1688 acceptable += 'nm'
1689 if opts[r'added']:
1689 if opts[r'added']:
1690 acceptable += 'a'
1690 acceptable += 'a'
1691 if opts[r'removed']:
1691 if opts[r'removed']:
1692 acceptable += 'r'
1692 acceptable += 'r'
1693 cwd = repo.getcwd()
1693 cwd = repo.getcwd()
1694 if not specs:
1694 if not specs:
1695 specs = ['.']
1695 specs = ['.']
1696
1696
1697 files, dirs = set(), set()
1697 files, dirs = set(), set()
1698 for spec in specs:
1698 for spec in specs:
1699 f, d = complete(spec, acceptable or 'nmar')
1699 f, d = complete(spec, acceptable or 'nmar')
1700 files.update(f)
1700 files.update(f)
1701 dirs.update(d)
1701 dirs.update(d)
1702 files.update(dirs)
1702 files.update(dirs)
1703 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1703 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1704 ui.write('\n')
1704 ui.write('\n')
1705
1705
1706 @command('debugpeer', [], _('PATH'), norepo=True)
1706 @command('debugpeer', [], _('PATH'), norepo=True)
1707 def debugpeer(ui, path):
1707 def debugpeer(ui, path):
1708 """establish a connection to a peer repository"""
1708 """establish a connection to a peer repository"""
1709 # Always enable peer request logging. Requires --debug to display
1709 # Always enable peer request logging. Requires --debug to display
1710 # though.
1710 # though.
1711 overrides = {
1711 overrides = {
1712 ('devel', 'debug.peer-request'): True,
1712 ('devel', 'debug.peer-request'): True,
1713 }
1713 }
1714
1714
1715 with ui.configoverride(overrides):
1715 with ui.configoverride(overrides):
1716 peer = hg.peer(ui, {}, path)
1716 peer = hg.peer(ui, {}, path)
1717
1717
1718 local = peer.local() is not None
1718 local = peer.local() is not None
1719 canpush = peer.canpush()
1719 canpush = peer.canpush()
1720
1720
1721 ui.write(_('url: %s\n') % peer.url())
1721 ui.write(_('url: %s\n') % peer.url())
1722 ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
1722 ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
1723 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
1723 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
1724
1724
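# Illustrative sketch only (not part of the original file): the peer
# inspection done by debugpeer above, assuming `path` is a peer URL or local
# path. Defined for illustration, never called.
def _peerinfoexample(ui, path):
    peer = hg.peer(ui, {}, path)
    return {
        'url': peer.url(),
        'local': peer.local() is not None,
        'pushable': peer.canpush(),
    }
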
1725 @command('debugpickmergetool',
1725 @command('debugpickmergetool',
1726 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1726 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1727 ('', 'changedelete', None, _('emulate merging change and delete')),
1727 ('', 'changedelete', None, _('emulate merging change and delete')),
1728 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1728 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1729 _('[PATTERN]...'),
1729 _('[PATTERN]...'),
1730 inferrepo=True)
1730 inferrepo=True)
1731 def debugpickmergetool(ui, repo, *pats, **opts):
1731 def debugpickmergetool(ui, repo, *pats, **opts):
1732 """examine which merge tool is chosen for specified file
1732 """examine which merge tool is chosen for specified file
1733
1733
1734 As described in :hg:`help merge-tools`, Mercurial examines
1734 As described in :hg:`help merge-tools`, Mercurial examines
1735 configurations below in this order to decide which merge tool is
1735 configurations below in this order to decide which merge tool is
1736 chosen for specified file.
1736 chosen for specified file.
1737
1737
1738 1. ``--tool`` option
1738 1. ``--tool`` option
1739 2. ``HGMERGE`` environment variable
1739 2. ``HGMERGE`` environment variable
1740 3. configurations in ``merge-patterns`` section
1740 3. configurations in ``merge-patterns`` section
1741 4. configuration of ``ui.merge``
1741 4. configuration of ``ui.merge``
1742 5. configurations in ``merge-tools`` section
1742 5. configurations in ``merge-tools`` section
1743 6. ``hgmerge`` tool (for historical reason only)
1743 6. ``hgmerge`` tool (for historical reason only)
1744 7. default tool for fallback (``:merge`` or ``:prompt``)
1744 7. default tool for fallback (``:merge`` or ``:prompt``)
1745
1745
1746 This command writes out examination result in the style below::
1746 This command writes out examination result in the style below::
1747
1747
1748 FILE = MERGETOOL
1748 FILE = MERGETOOL
1749
1749
1750 By default, all files known in the first parent context of the
1750 By default, all files known in the first parent context of the
1751 working directory are examined. Use file patterns and/or -I/-X
1751 working directory are examined. Use file patterns and/or -I/-X
1752 options to limit target files. -r/--rev is also useful to examine
1752 options to limit target files. -r/--rev is also useful to examine
1753 files in another context without actual updating to it.
1753 files in another context without actual updating to it.
1754
1754
1755 With --debug, this command shows warning messages while matching
1755 With --debug, this command shows warning messages while matching
1756 against ``merge-patterns`` and so on, too. It is recommended to
1756 against ``merge-patterns`` and so on, too. It is recommended to
1757 use this option with explicit file patterns and/or -I/-X options,
1757 use this option with explicit file patterns and/or -I/-X options,
1758 because this option increases amount of output per file according
1758 because this option increases amount of output per file according
1759 to configurations in hgrc.
1759 to configurations in hgrc.
1760
1760
1761 With -v/--verbose, this command shows configurations below at
1761 With -v/--verbose, this command shows configurations below at
1762 first (only if specified).
1762 first (only if specified).
1763
1763
1764 - ``--tool`` option
1764 - ``--tool`` option
1765 - ``HGMERGE`` environment variable
1765 - ``HGMERGE`` environment variable
1766 - configuration of ``ui.merge``
1766 - configuration of ``ui.merge``
1767
1767
1768 If merge tool is chosen before matching against
1768 If merge tool is chosen before matching against
1769 ``merge-patterns``, this command can't show any helpful
1769 ``merge-patterns``, this command can't show any helpful
1770 information, even with --debug. In such case, information above is
1770 information, even with --debug. In such case, information above is
1771 useful to know why a merge tool is chosen.
1771 useful to know why a merge tool is chosen.
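
For example, a hypothetical invocation might look like::

    $ hg debugpickmergetool --rev . foo.c
    foo.c = :merge

The file name and reported tool here are illustrative only; the actual
output depends on the configuration sources listed above.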
1772 """
1772 """
1773 opts = pycompat.byteskwargs(opts)
1773 opts = pycompat.byteskwargs(opts)
1774 overrides = {}
1774 overrides = {}
1775 if opts['tool']:
1775 if opts['tool']:
1776 overrides[('ui', 'forcemerge')] = opts['tool']
1776 overrides[('ui', 'forcemerge')] = opts['tool']
1777 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1777 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1778
1778
1779 with ui.configoverride(overrides, 'debugmergepatterns'):
1779 with ui.configoverride(overrides, 'debugmergepatterns'):
1780 hgmerge = encoding.environ.get("HGMERGE")
1780 hgmerge = encoding.environ.get("HGMERGE")
1781 if hgmerge is not None:
1781 if hgmerge is not None:
1782 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1782 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1783 uimerge = ui.config("ui", "merge")
1783 uimerge = ui.config("ui", "merge")
1784 if uimerge:
1784 if uimerge:
1785 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1785 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1786
1786
1787 ctx = scmutil.revsingle(repo, opts.get('rev'))
1787 ctx = scmutil.revsingle(repo, opts.get('rev'))
1788 m = scmutil.match(ctx, pats, opts)
1788 m = scmutil.match(ctx, pats, opts)
1789 changedelete = opts['changedelete']
1789 changedelete = opts['changedelete']
1790 for path in ctx.walk(m):
1790 for path in ctx.walk(m):
1791 fctx = ctx[path]
1791 fctx = ctx[path]
1792 try:
1792 try:
1793 if not ui.debugflag:
1793 if not ui.debugflag:
1794 ui.pushbuffer(error=True)
1794 ui.pushbuffer(error=True)
1795 tool, toolpath = filemerge._picktool(repo, ui, path,
1795 tool, toolpath = filemerge._picktool(repo, ui, path,
1796 fctx.isbinary(),
1796 fctx.isbinary(),
1797 'l' in fctx.flags(),
1797 'l' in fctx.flags(),
1798 changedelete)
1798 changedelete)
1799 finally:
1799 finally:
1800 if not ui.debugflag:
1800 if not ui.debugflag:
1801 ui.popbuffer()
1801 ui.popbuffer()
1802 ui.write(('%s = %s\n') % (path, tool))
1802 ui.write(('%s = %s\n') % (path, tool))
1803
1803
1804 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1804 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1805 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1805 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
'''access the pushkey key/value protocol

With two args, list the keys in the given namespace.

With five args, set a key to new if it is currently set to old.
Reports success or failure.
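
For example, the keys in the ``bookmarks`` namespace of a hypothetical
sibling repository could be listed with::

    $ hg debugpushkey ../other bookmarks

Each line of output contains a key and its value separated by a tab.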
'''
1813
1813
1814 target = hg.peer(ui, {}, repopath)
1814 target = hg.peer(ui, {}, repopath)
1815 if keyinfo:
1815 if keyinfo:
1816 key, old, new = keyinfo
1816 key, old, new = keyinfo
1817 r = target.pushkey(namespace, key, old, new)
1817 r = target.pushkey(namespace, key, old, new)
1818 ui.status(pycompat.bytestr(r) + '\n')
1818 ui.status(pycompat.bytestr(r) + '\n')
1819 return not r
1819 return not r
1820 else:
1820 else:
1821 for k, v in sorted(target.listkeys(namespace).iteritems()):
1821 for k, v in sorted(target.listkeys(namespace).iteritems()):
1822 ui.write("%s\t%s\n" % (util.escapestr(k),
1822 ui.write("%s\t%s\n" % (util.escapestr(k),
1823 util.escapestr(v)))
1823 util.escapestr(v)))
1824
1824
1825 @command('debugpvec', [], _('A B'))
1825 @command('debugpvec', [], _('A B'))
1826 def debugpvec(ui, repo, a, b=None):
1826 def debugpvec(ui, repo, a, b=None):
1827 ca = scmutil.revsingle(repo, a)
1827 ca = scmutil.revsingle(repo, a)
1828 cb = scmutil.revsingle(repo, b)
1828 cb = scmutil.revsingle(repo, b)
1829 pa = pvec.ctxpvec(ca)
1829 pa = pvec.ctxpvec(ca)
1830 pb = pvec.ctxpvec(cb)
1830 pb = pvec.ctxpvec(cb)
1831 if pa == pb:
1831 if pa == pb:
1832 rel = "="
1832 rel = "="
1833 elif pa > pb:
1833 elif pa > pb:
1834 rel = ">"
1834 rel = ">"
1835 elif pa < pb:
1835 elif pa < pb:
1836 rel = "<"
1836 rel = "<"
1837 elif pa | pb:
1837 elif pa | pb:
1838 rel = "|"
1838 rel = "|"
1839 ui.write(_("a: %s\n") % pa)
1839 ui.write(_("a: %s\n") % pa)
1840 ui.write(_("b: %s\n") % pb)
1840 ui.write(_("b: %s\n") % pb)
1841 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1841 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1842 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1842 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1843 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1843 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1844 pa.distance(pb), rel))
1844 pa.distance(pb), rel))
1845
1845
1846 @command('debugrebuilddirstate|debugrebuildstate',
1846 @command('debugrebuilddirstate|debugrebuildstate',
1847 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1847 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1848 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1848 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1849 'the working copy parent')),
1849 'the working copy parent')),
1850 ],
1850 ],
1851 _('[-r REV]'))
1851 _('[-r REV]'))
1852 def debugrebuilddirstate(ui, repo, rev, **opts):
1852 def debugrebuilddirstate(ui, repo, rev, **opts):
1853 """rebuild the dirstate as it would look like for the given revision
1853 """rebuild the dirstate as it would look like for the given revision
1854
1854
1855 If no revision is specified the first current parent will be used.
1855 If no revision is specified the first current parent will be used.
1856
1856
1857 The dirstate will be set to the files of the given revision.
1857 The dirstate will be set to the files of the given revision.
1858 The actual working directory content or existing dirstate
1858 The actual working directory content or existing dirstate
1859 information such as adds or removes is not considered.
1859 information such as adds or removes is not considered.
1860
1860
1861 ``minimal`` will only rebuild the dirstate status for files that claim to be
1861 ``minimal`` will only rebuild the dirstate status for files that claim to be
1862 tracked but are not in the parent manifest, or that exist in the parent
1862 tracked but are not in the parent manifest, or that exist in the parent
1863 manifest but are not in the dirstate. It will not change adds, removes, or
1863 manifest but are not in the dirstate. It will not change adds, removes, or
1864 modified files that are in the working copy parent.
1864 modified files that are in the working copy parent.
1865
1865
1866 One use of this command is to make the next :hg:`status` invocation
1866 One use of this command is to make the next :hg:`status` invocation
1867 check the actual file content.
1867 check the actual file content.
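
For example, a hypothetical minimal rebuild against the working copy
parent could be run as::

    $ hg debugrebuilddirstate --minimal

Only the ``--minimal`` and ``-r/--rev`` options shown in the command
definition above are assumed here.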
1868 """
1868 """
1869 ctx = scmutil.revsingle(repo, rev)
1869 ctx = scmutil.revsingle(repo, rev)
1870 with repo.wlock():
1870 with repo.wlock():
1871 dirstate = repo.dirstate
1871 dirstate = repo.dirstate
1872 changedfiles = None
1872 changedfiles = None
1873 # See command doc for what minimal does.
1873 # See command doc for what minimal does.
1874 if opts.get(r'minimal'):
1874 if opts.get(r'minimal'):
1875 manifestfiles = set(ctx.manifest().keys())
1875 manifestfiles = set(ctx.manifest().keys())
1876 dirstatefiles = set(dirstate)
1876 dirstatefiles = set(dirstate)
1877 manifestonly = manifestfiles - dirstatefiles
1877 manifestonly = manifestfiles - dirstatefiles
1878 dsonly = dirstatefiles - manifestfiles
1878 dsonly = dirstatefiles - manifestfiles
1879 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1879 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1880 changedfiles = manifestonly | dsnotadded
1880 changedfiles = manifestonly | dsnotadded
1881
1881
1882 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1882 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1883
1883
1884 @command('debugrebuildfncache', [], '')
1884 @command('debugrebuildfncache', [], '')
1885 def debugrebuildfncache(ui, repo):
1885 def debugrebuildfncache(ui, repo):
1886 """rebuild the fncache file"""
1886 """rebuild the fncache file"""
1887 repair.rebuildfncache(ui, repo)
1887 repair.rebuildfncache(ui, repo)
1888
1888
1889 @command('debugrename',
1889 @command('debugrename',
1890 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1890 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1891 _('[-r REV] FILE'))
1891 _('[-r REV] FILE'))
1892 def debugrename(ui, repo, file1, *pats, **opts):
1892 def debugrename(ui, repo, file1, *pats, **opts):
1893 """dump rename information"""
1893 """dump rename information"""
1894
1894
1895 opts = pycompat.byteskwargs(opts)
1895 opts = pycompat.byteskwargs(opts)
1896 ctx = scmutil.revsingle(repo, opts.get('rev'))
1896 ctx = scmutil.revsingle(repo, opts.get('rev'))
1897 m = scmutil.match(ctx, (file1,) + pats, opts)
1897 m = scmutil.match(ctx, (file1,) + pats, opts)
1898 for abs in ctx.walk(m):
1898 for abs in ctx.walk(m):
1899 fctx = ctx[abs]
1899 fctx = ctx[abs]
1900 o = fctx.filelog().renamed(fctx.filenode())
1900 o = fctx.filelog().renamed(fctx.filenode())
1901 rel = m.rel(abs)
1901 rel = m.rel(abs)
1902 if o:
1902 if o:
1903 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1903 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1904 else:
1904 else:
1905 ui.write(_("%s not renamed\n") % rel)
1905 ui.write(_("%s not renamed\n") % rel)
1906
1906
1907 @command('debugrevlog', cmdutil.debugrevlogopts +
1907 @command('debugrevlog', cmdutil.debugrevlogopts +
1908 [('d', 'dump', False, _('dump index data'))],
1908 [('d', 'dump', False, _('dump index data'))],
1909 _('-c|-m|FILE'),
1909 _('-c|-m|FILE'),
1910 optionalrepo=True)
1910 optionalrepo=True)
1911 def debugrevlog(ui, repo, file_=None, **opts):
1911 def debugrevlog(ui, repo, file_=None, **opts):
1912 """show data and statistics about a revlog"""
1912 """show data and statistics about a revlog"""
1913 opts = pycompat.byteskwargs(opts)
1913 opts = pycompat.byteskwargs(opts)
1914 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1914 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1915
1915
1916 if opts.get("dump"):
1916 if opts.get("dump"):
1917 numrevs = len(r)
1917 numrevs = len(r)
1918 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1918 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1919 " rawsize totalsize compression heads chainlen\n"))
1919 " rawsize totalsize compression heads chainlen\n"))
1920 ts = 0
1920 ts = 0
1921 heads = set()
1921 heads = set()
1922
1922
1923 for rev in xrange(numrevs):
1923 for rev in xrange(numrevs):
1924 dbase = r.deltaparent(rev)
1924 dbase = r.deltaparent(rev)
1925 if dbase == -1:
1925 if dbase == -1:
1926 dbase = rev
1926 dbase = rev
1927 cbase = r.chainbase(rev)
1927 cbase = r.chainbase(rev)
1928 clen = r.chainlen(rev)
1928 clen = r.chainlen(rev)
1929 p1, p2 = r.parentrevs(rev)
1929 p1, p2 = r.parentrevs(rev)
1930 rs = r.rawsize(rev)
1930 rs = r.rawsize(rev)
1931 ts = ts + rs
1931 ts = ts + rs
1932 heads -= set(r.parentrevs(rev))
1932 heads -= set(r.parentrevs(rev))
1933 heads.add(rev)
1933 heads.add(rev)
1934 try:
1934 try:
1935 compression = ts / r.end(rev)
1935 compression = ts / r.end(rev)
1936 except ZeroDivisionError:
1936 except ZeroDivisionError:
1937 compression = 0
1937 compression = 0
1938 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1938 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1939 "%11d %5d %8d\n" %
1939 "%11d %5d %8d\n" %
1940 (rev, p1, p2, r.start(rev), r.end(rev),
1940 (rev, p1, p2, r.start(rev), r.end(rev),
1941 r.start(dbase), r.start(cbase),
1941 r.start(dbase), r.start(cbase),
1942 r.start(p1), r.start(p2),
1942 r.start(p1), r.start(p2),
1943 rs, ts, compression, len(heads), clen))
1943 rs, ts, compression, len(heads), clen))
1944 return 0
1944 return 0
1945
1945
1946 v = r.version
1946 v = r.version
1947 format = v & 0xFFFF
1947 format = v & 0xFFFF
1948 flags = []
1948 flags = []
1949 gdelta = False
1949 gdelta = False
1950 if v & revlog.FLAG_INLINE_DATA:
1950 if v & revlog.FLAG_INLINE_DATA:
1951 flags.append('inline')
1951 flags.append('inline')
1952 if v & revlog.FLAG_GENERALDELTA:
1952 if v & revlog.FLAG_GENERALDELTA:
1953 gdelta = True
1953 gdelta = True
1954 flags.append('generaldelta')
1954 flags.append('generaldelta')
1955 if not flags:
1955 if not flags:
1956 flags = ['(none)']
1956 flags = ['(none)']
1957
1957
1958 nummerges = 0
1958 nummerges = 0
1959 numfull = 0
1959 numfull = 0
1960 numprev = 0
1960 numprev = 0
1961 nump1 = 0
1961 nump1 = 0
1962 nump2 = 0
1962 nump2 = 0
1963 numother = 0
1963 numother = 0
1964 nump1prev = 0
1964 nump1prev = 0
1965 nump2prev = 0
1965 nump2prev = 0
1966 chainlengths = []
1966 chainlengths = []
1967 chainbases = []
1967 chainbases = []
1968 chainspans = []
1968 chainspans = []
1969
1969
1970 datasize = [None, 0, 0]
1970 datasize = [None, 0, 0]
1971 fullsize = [None, 0, 0]
1971 fullsize = [None, 0, 0]
1972 deltasize = [None, 0, 0]
1972 deltasize = [None, 0, 0]
1973 chunktypecounts = {}
1973 chunktypecounts = {}
1974 chunktypesizes = {}
1974 chunktypesizes = {}
1975
1975
1976 def addsize(size, l):
1976 def addsize(size, l):
1977 if l[0] is None or size < l[0]:
1977 if l[0] is None or size < l[0]:
1978 l[0] = size
1978 l[0] = size
1979 if size > l[1]:
1979 if size > l[1]:
1980 l[1] = size
1980 l[1] = size
1981 l[2] += size
1981 l[2] += size
1982
1982
1983 numrevs = len(r)
1983 numrevs = len(r)
1984 for rev in xrange(numrevs):
1984 for rev in xrange(numrevs):
1985 p1, p2 = r.parentrevs(rev)
1985 p1, p2 = r.parentrevs(rev)
1986 delta = r.deltaparent(rev)
1986 delta = r.deltaparent(rev)
1987 if format > 0:
1987 if format > 0:
1988 addsize(r.rawsize(rev), datasize)
1988 addsize(r.rawsize(rev), datasize)
1989 if p2 != nullrev:
1989 if p2 != nullrev:
1990 nummerges += 1
1990 nummerges += 1
1991 size = r.length(rev)
1991 size = r.length(rev)
1992 if delta == nullrev:
1992 if delta == nullrev:
1993 chainlengths.append(0)
1993 chainlengths.append(0)
1994 chainbases.append(r.start(rev))
1994 chainbases.append(r.start(rev))
1995 chainspans.append(size)
1995 chainspans.append(size)
1996 numfull += 1
1996 numfull += 1
1997 addsize(size, fullsize)
1997 addsize(size, fullsize)
1998 else:
1998 else:
1999 chainlengths.append(chainlengths[delta] + 1)
1999 chainlengths.append(chainlengths[delta] + 1)
2000 baseaddr = chainbases[delta]
2000 baseaddr = chainbases[delta]
2001 revaddr = r.start(rev)
2001 revaddr = r.start(rev)
2002 chainbases.append(baseaddr)
2002 chainbases.append(baseaddr)
2003 chainspans.append((revaddr - baseaddr) + size)
2003 chainspans.append((revaddr - baseaddr) + size)
2004 addsize(size, deltasize)
2004 addsize(size, deltasize)
2005 if delta == rev - 1:
2005 if delta == rev - 1:
2006 numprev += 1
2006 numprev += 1
2007 if delta == p1:
2007 if delta == p1:
2008 nump1prev += 1
2008 nump1prev += 1
2009 elif delta == p2:
2009 elif delta == p2:
2010 nump2prev += 1
2010 nump2prev += 1
2011 elif delta == p1:
2011 elif delta == p1:
2012 nump1 += 1
2012 nump1 += 1
2013 elif delta == p2:
2013 elif delta == p2:
2014 nump2 += 1
2014 nump2 += 1
2015 elif delta != nullrev:
2015 elif delta != nullrev:
2016 numother += 1
2016 numother += 1
2017
2017
2018 # Obtain data on the raw chunks in the revlog.
2018 # Obtain data on the raw chunks in the revlog.
2019 segment = r._getsegmentforrevs(rev, rev)[1]
2019 segment = r._getsegmentforrevs(rev, rev)[1]
2020 if segment:
2020 if segment:
2021 chunktype = bytes(segment[0:1])
2021 chunktype = bytes(segment[0:1])
2022 else:
2022 else:
2023 chunktype = 'empty'
2023 chunktype = 'empty'
2024
2024
2025 if chunktype not in chunktypecounts:
2025 if chunktype not in chunktypecounts:
2026 chunktypecounts[chunktype] = 0
2026 chunktypecounts[chunktype] = 0
2027 chunktypesizes[chunktype] = 0
2027 chunktypesizes[chunktype] = 0
2028
2028
2029 chunktypecounts[chunktype] += 1
2029 chunktypecounts[chunktype] += 1
2030 chunktypesizes[chunktype] += size
2030 chunktypesizes[chunktype] += size
2031
2031
2032 # Adjust size min value for empty cases
2032 # Adjust size min value for empty cases
2033 for size in (datasize, fullsize, deltasize):
2033 for size in (datasize, fullsize, deltasize):
2034 if size[0] is None:
2034 if size[0] is None:
2035 size[0] = 0
2035 size[0] = 0
2036
2036
2037 numdeltas = numrevs - numfull
2037 numdeltas = numrevs - numfull
2038 numoprev = numprev - nump1prev - nump2prev
2038 numoprev = numprev - nump1prev - nump2prev
2039 totalrawsize = datasize[2]
2039 totalrawsize = datasize[2]
2040 datasize[2] /= numrevs
2040 datasize[2] /= numrevs
2041 fulltotal = fullsize[2]
2041 fulltotal = fullsize[2]
2042 fullsize[2] /= numfull
2042 fullsize[2] /= numfull
2043 deltatotal = deltasize[2]
2043 deltatotal = deltasize[2]
2044 if numrevs - numfull > 0:
2044 if numrevs - numfull > 0:
2045 deltasize[2] /= numrevs - numfull
2045 deltasize[2] /= numrevs - numfull
2046 totalsize = fulltotal + deltatotal
2046 totalsize = fulltotal + deltatotal
2047 avgchainlen = sum(chainlengths) / numrevs
2047 avgchainlen = sum(chainlengths) / numrevs
2048 maxchainlen = max(chainlengths)
2048 maxchainlen = max(chainlengths)
2049 maxchainspan = max(chainspans)
2049 maxchainspan = max(chainspans)
2050 compratio = 1
2050 compratio = 1
2051 if totalsize:
2051 if totalsize:
2052 compratio = totalrawsize / totalsize
2052 compratio = totalrawsize / totalsize
2053
2053
2054 basedfmtstr = '%%%dd\n'
2054 basedfmtstr = '%%%dd\n'
2055 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2055 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2056
2056
2057 def dfmtstr(max):
2057 def dfmtstr(max):
2058 return basedfmtstr % len(str(max))
2058 return basedfmtstr % len(str(max))
2059 def pcfmtstr(max, padding=0):
2059 def pcfmtstr(max, padding=0):
2060 return basepcfmtstr % (len(str(max)), ' ' * padding)
2060 return basepcfmtstr % (len(str(max)), ' ' * padding)
2061
2061
2062 def pcfmt(value, total):
2062 def pcfmt(value, total):
2063 if total:
2063 if total:
2064 return (value, 100 * float(value) / total)
2064 return (value, 100 * float(value) / total)
2065 else:
2065 else:
2066 return value, 100.0
2066 return value, 100.0
2067
2067
2068 ui.write(('format : %d\n') % format)
2068 ui.write(('format : %d\n') % format)
2069 ui.write(('flags : %s\n') % ', '.join(flags))
2069 ui.write(('flags : %s\n') % ', '.join(flags))
2070
2070
2071 ui.write('\n')
2071 ui.write('\n')
2072 fmt = pcfmtstr(totalsize)
2072 fmt = pcfmtstr(totalsize)
2073 fmt2 = dfmtstr(totalsize)
2073 fmt2 = dfmtstr(totalsize)
2074 ui.write(('revisions : ') + fmt2 % numrevs)
2074 ui.write(('revisions : ') + fmt2 % numrevs)
2075 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2075 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2076 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2076 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2077 ui.write(('revisions : ') + fmt2 % numrevs)
2077 ui.write(('revisions : ') + fmt2 % numrevs)
2078 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
2078 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
2079 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2079 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2080 ui.write(('revision size : ') + fmt2 % totalsize)
2080 ui.write(('revision size : ') + fmt2 % totalsize)
2081 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
2081 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
2082 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2082 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2083
2083
2084 def fmtchunktype(chunktype):
2084 def fmtchunktype(chunktype):
2085 if chunktype == 'empty':
2085 if chunktype == 'empty':
2086 return ' %s : ' % chunktype
2086 return ' %s : ' % chunktype
2087 elif chunktype in pycompat.bytestr(string.ascii_letters):
2087 elif chunktype in pycompat.bytestr(string.ascii_letters):
2088 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2088 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2089 else:
2089 else:
2090 return ' 0x%s : ' % hex(chunktype)
2090 return ' 0x%s : ' % hex(chunktype)
2091
2091
2092 ui.write('\n')
2092 ui.write('\n')
2093 ui.write(('chunks : ') + fmt2 % numrevs)
2093 ui.write(('chunks : ') + fmt2 % numrevs)
2094 for chunktype in sorted(chunktypecounts):
2094 for chunktype in sorted(chunktypecounts):
2095 ui.write(fmtchunktype(chunktype))
2095 ui.write(fmtchunktype(chunktype))
2096 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2096 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2097 ui.write(('chunks size : ') + fmt2 % totalsize)
2097 ui.write(('chunks size : ') + fmt2 % totalsize)
2098 for chunktype in sorted(chunktypecounts):
2098 for chunktype in sorted(chunktypecounts):
2099 ui.write(fmtchunktype(chunktype))
2099 ui.write(fmtchunktype(chunktype))
2100 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2100 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2101
2101
2102 ui.write('\n')
2102 ui.write('\n')
2103 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2103 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2104 ui.write(('avg chain length : ') + fmt % avgchainlen)
2104 ui.write(('avg chain length : ') + fmt % avgchainlen)
2105 ui.write(('max chain length : ') + fmt % maxchainlen)
2105 ui.write(('max chain length : ') + fmt % maxchainlen)
2106 ui.write(('max chain reach : ') + fmt % maxchainspan)
2106 ui.write(('max chain reach : ') + fmt % maxchainspan)
2107 ui.write(('compression ratio : ') + fmt % compratio)
2107 ui.write(('compression ratio : ') + fmt % compratio)
2108
2108
2109 if format > 0:
2109 if format > 0:
2110 ui.write('\n')
2110 ui.write('\n')
2111 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2111 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2112 % tuple(datasize))
2112 % tuple(datasize))
2113 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2113 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2114 % tuple(fullsize))
2114 % tuple(fullsize))
2115 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2115 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2116 % tuple(deltasize))
2116 % tuple(deltasize))
2117
2117
2118 if numdeltas > 0:
2118 if numdeltas > 0:
2119 ui.write('\n')
2119 ui.write('\n')
2120 fmt = pcfmtstr(numdeltas)
2120 fmt = pcfmtstr(numdeltas)
2121 fmt2 = pcfmtstr(numdeltas, 4)
2121 fmt2 = pcfmtstr(numdeltas, 4)
2122 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2122 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2123 if numprev > 0:
2123 if numprev > 0:
2124 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2124 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2125 numprev))
2125 numprev))
2126 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2126 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2127 numprev))
2127 numprev))
2128 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2128 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2129 numprev))
2129 numprev))
2130 if gdelta:
2130 if gdelta:
2131 ui.write(('deltas against p1 : ')
2131 ui.write(('deltas against p1 : ')
2132 + fmt % pcfmt(nump1, numdeltas))
2132 + fmt % pcfmt(nump1, numdeltas))
2133 ui.write(('deltas against p2 : ')
2133 ui.write(('deltas against p2 : ')
2134 + fmt % pcfmt(nump2, numdeltas))
2134 + fmt % pcfmt(nump2, numdeltas))
2135 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2135 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2136 numdeltas))
2136 numdeltas))
2137
2137
2138 @command('debugrevspec',
2138 @command('debugrevspec',
2139 [('', 'optimize', None,
2139 [('', 'optimize', None,
2140 _('print parsed tree after optimizing (DEPRECATED)')),
2140 _('print parsed tree after optimizing (DEPRECATED)')),
2141 ('', 'show-revs', True, _('print list of result revisions (default)')),
2141 ('', 'show-revs', True, _('print list of result revisions (default)')),
2142 ('s', 'show-set', None, _('print internal representation of result set')),
2142 ('s', 'show-set', None, _('print internal representation of result set')),
2143 ('p', 'show-stage', [],
2143 ('p', 'show-stage', [],
2144 _('print parsed tree at the given stage'), _('NAME')),
2144 _('print parsed tree at the given stage'), _('NAME')),
2145 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2145 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2146 ('', 'verify-optimized', False, _('verify optimized result')),
2146 ('', 'verify-optimized', False, _('verify optimized result')),
2147 ],
2147 ],
2148 ('REVSPEC'))
2148 ('REVSPEC'))
2149 def debugrevspec(ui, repo, expr, **opts):
2149 def debugrevspec(ui, repo, expr, **opts):
2150 """parse and apply a revision specification
2150 """parse and apply a revision specification
2151
2151
2152 Use -p/--show-stage option to print the parsed tree at the given stages.
2152 Use -p/--show-stage option to print the parsed tree at the given stages.
2153 Use -p all to print tree at every stage.
2153 Use -p all to print tree at every stage.
2154
2154
2155 Use --no-show-revs option with -s or -p to print only the set
2155 Use --no-show-revs option with -s or -p to print only the set
2156 representation or the parsed tree respectively.
2156 representation or the parsed tree respectively.
2157
2157
2158 Use --verify-optimized to compare the optimized result with the unoptimized
2158 Use --verify-optimized to compare the optimized result with the unoptimized
2159 one. Returns 1 if the optimized result differs.
2159 one. Returns 1 if the optimized result differs.
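
For example, a hypothetical run showing every parsing stage without the
resulting revisions might look like::

    $ hg debugrevspec -p all --no-show-revs 'heads(all())'

The revset used here is only an illustration; any expression accepted per
:hg:`help revsets` works.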
2160 """
2160 """
2161 opts = pycompat.byteskwargs(opts)
2161 opts = pycompat.byteskwargs(opts)
2162 aliases = ui.configitems('revsetalias')
2162 aliases = ui.configitems('revsetalias')
2163 stages = [
2163 stages = [
2164 ('parsed', lambda tree: tree),
2164 ('parsed', lambda tree: tree),
2165 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2165 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2166 ui.warn)),
2166 ui.warn)),
2167 ('concatenated', revsetlang.foldconcat),
2167 ('concatenated', revsetlang.foldconcat),
2168 ('analyzed', revsetlang.analyze),
2168 ('analyzed', revsetlang.analyze),
2169 ('optimized', revsetlang.optimize),
2169 ('optimized', revsetlang.optimize),
2170 ]
2170 ]
2171 if opts['no_optimized']:
2171 if opts['no_optimized']:
2172 stages = stages[:-1]
2172 stages = stages[:-1]
2173 if opts['verify_optimized'] and opts['no_optimized']:
2173 if opts['verify_optimized'] and opts['no_optimized']:
2174 raise error.Abort(_('cannot use --verify-optimized with '
2174 raise error.Abort(_('cannot use --verify-optimized with '
2175 '--no-optimized'))
2175 '--no-optimized'))
2176 stagenames = set(n for n, f in stages)
2176 stagenames = set(n for n, f in stages)
2177
2177
2178 showalways = set()
2178 showalways = set()
2179 showchanged = set()
2179 showchanged = set()
2180 if ui.verbose and not opts['show_stage']:
2180 if ui.verbose and not opts['show_stage']:
2181 # show parsed tree by --verbose (deprecated)
2181 # show parsed tree by --verbose (deprecated)
2182 showalways.add('parsed')
2182 showalways.add('parsed')
2183 showchanged.update(['expanded', 'concatenated'])
2183 showchanged.update(['expanded', 'concatenated'])
2184 if opts['optimize']:
2184 if opts['optimize']:
2185 showalways.add('optimized')
2185 showalways.add('optimized')
2186 if opts['show_stage'] and opts['optimize']:
2186 if opts['show_stage'] and opts['optimize']:
2187 raise error.Abort(_('cannot use --optimize with --show-stage'))
2187 raise error.Abort(_('cannot use --optimize with --show-stage'))
2188 if opts['show_stage'] == ['all']:
2188 if opts['show_stage'] == ['all']:
2189 showalways.update(stagenames)
2189 showalways.update(stagenames)
2190 else:
2190 else:
2191 for n in opts['show_stage']:
2191 for n in opts['show_stage']:
2192 if n not in stagenames:
2192 if n not in stagenames:
2193 raise error.Abort(_('invalid stage name: %s') % n)
2193 raise error.Abort(_('invalid stage name: %s') % n)
2194 showalways.update(opts['show_stage'])
2194 showalways.update(opts['show_stage'])
2195
2195
2196 treebystage = {}
2196 treebystage = {}
2197 printedtree = None
2197 printedtree = None
2198 tree = revsetlang.parse(expr, lookup=repo.__contains__)
2198 tree = revsetlang.parse(expr, lookup=repo.__contains__)
2199 for n, f in stages:
2199 for n, f in stages:
2200 treebystage[n] = tree = f(tree)
2200 treebystage[n] = tree = f(tree)
2201 if n in showalways or (n in showchanged and tree != printedtree):
2201 if n in showalways or (n in showchanged and tree != printedtree):
2202 if opts['show_stage'] or n != 'parsed':
2202 if opts['show_stage'] or n != 'parsed':
2203 ui.write(("* %s:\n") % n)
2203 ui.write(("* %s:\n") % n)
2204 ui.write(revsetlang.prettyformat(tree), "\n")
2204 ui.write(revsetlang.prettyformat(tree), "\n")
2205 printedtree = tree
2205 printedtree = tree
2206
2206
2207 if opts['verify_optimized']:
2207 if opts['verify_optimized']:
2208 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2208 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2209 brevs = revset.makematcher(treebystage['optimized'])(repo)
2209 brevs = revset.makematcher(treebystage['optimized'])(repo)
2210 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2210 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2211 ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
2211 ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
2212 ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
2212 ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
2213 arevs = list(arevs)
2213 arevs = list(arevs)
2214 brevs = list(brevs)
2214 brevs = list(brevs)
2215 if arevs == brevs:
2215 if arevs == brevs:
2216 return 0
2216 return 0
2217 ui.write(('--- analyzed\n'), label='diff.file_a')
2217 ui.write(('--- analyzed\n'), label='diff.file_a')
2218 ui.write(('+++ optimized\n'), label='diff.file_b')
2218 ui.write(('+++ optimized\n'), label='diff.file_b')
2219 sm = difflib.SequenceMatcher(None, arevs, brevs)
2219 sm = difflib.SequenceMatcher(None, arevs, brevs)
2220 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2220 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2221 if tag in ('delete', 'replace'):
2221 if tag in ('delete', 'replace'):
2222 for c in arevs[alo:ahi]:
2222 for c in arevs[alo:ahi]:
2223 ui.write('-%s\n' % c, label='diff.deleted')
2223 ui.write('-%s\n' % c, label='diff.deleted')
2224 if tag in ('insert', 'replace'):
2224 if tag in ('insert', 'replace'):
2225 for c in brevs[blo:bhi]:
2225 for c in brevs[blo:bhi]:
2226 ui.write('+%s\n' % c, label='diff.inserted')
2226 ui.write('+%s\n' % c, label='diff.inserted')
2227 if tag == 'equal':
2227 if tag == 'equal':
2228 for c in arevs[alo:ahi]:
2228 for c in arevs[alo:ahi]:
2229 ui.write(' %s\n' % c)
2229 ui.write(' %s\n' % c)
2230 return 1
2230 return 1
2231
2231
2232 func = revset.makematcher(tree)
2232 func = revset.makematcher(tree)
2233 revs = func(repo)
2233 revs = func(repo)
2234 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2234 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2235 ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
2235 ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
2236 if not opts['show_revs']:
2236 if not opts['show_revs']:
2237 return
2237 return
2238 for c in revs:
2238 for c in revs:
2239 ui.write("%d\n" % c)
2239 ui.write("%d\n" % c)
2240
2240
2241 @command('debugserve', [
2241 @command('debugserve', [
2242 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2242 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2243 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2243 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2244 ('', 'logiofile', '', _('file to log server I/O to')),
2244 ('', 'logiofile', '', _('file to log server I/O to')),
2245 ], '')
2245 ], '')
2246 def debugserve(ui, repo, **opts):
2246 def debugserve(ui, repo, **opts):
2247 """run a server with advanced settings
2247 """run a server with advanced settings
2248
2248
2249 This command is similar to :hg:`serve`. It exists partially as a
2249 This command is similar to :hg:`serve`. It exists partially as a
2250 workaround to the fact that ``hg serve --stdio`` must have specific
2250 workaround to the fact that ``hg serve --stdio`` must have specific
2251 arguments for security reasons.
2251 arguments for security reasons.
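
For example, an SSH protocol server bound to the process's stdio, with
its I/O logged to a hypothetical file, could be started with::

    $ hg debugserve --sshstdio --logiofile /tmp/hg-serve.log

The log path is illustrative; only the options defined above are assumed.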
2252 """
2252 """
2253 opts = pycompat.byteskwargs(opts)
2253 opts = pycompat.byteskwargs(opts)
2254
2254
2255 if not opts['sshstdio']:
2255 if not opts['sshstdio']:
2256 raise error.Abort(_('only --sshstdio is currently supported'))
2256 raise error.Abort(_('only --sshstdio is currently supported'))
2257
2257
2258 logfh = None
2258 logfh = None
2259
2259
2260 if opts['logiofd'] and opts['logiofile']:
2260 if opts['logiofd'] and opts['logiofile']:
2261 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2261 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2262
2262
2263 if opts['logiofd']:
2263 if opts['logiofd']:
2264 # Line buffered because output is line based.
2264 # Line buffered because output is line based.
2265 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2265 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2266 elif opts['logiofile']:
2266 elif opts['logiofile']:
2267 logfh = open(opts['logiofile'], 'ab', 1)
2267 logfh = open(opts['logiofile'], 'ab', 1)
2268
2268
2269 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2269 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2270 s.serve_forever()
2270 s.serve_forever()
2271
2271
2272 @command('debugsetparents', [], _('REV1 [REV2]'))
2272 @command('debugsetparents', [], _('REV1 [REV2]'))
2273 def debugsetparents(ui, repo, rev1, rev2=None):
2273 def debugsetparents(ui, repo, rev1, rev2=None):
2274 """manually set the parents of the current working directory
2274 """manually set the parents of the current working directory
2275
2275
2276 This is useful for writing repository conversion tools, but should
2276 This is useful for writing repository conversion tools, but should
2277 be used with care. For example, neither the working directory nor the
2277 be used with care. For example, neither the working directory nor the
2278 dirstate is updated, so file status may be incorrect after running this
2278 dirstate is updated, so file status may be incorrect after running this
2279 command.
2279 command.
2280
2280
2281 Returns 0 on success.
2281 Returns 0 on success.
2282 """
2282 """
2283
2283
2284 r1 = scmutil.revsingle(repo, rev1).node()
2284 r1 = scmutil.revsingle(repo, rev1).node()
2285 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2285 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2286
2286
2287 with repo.wlock():
2287 with repo.wlock():
2288 repo.setparents(r1, r2)
2288 repo.setparents(r1, r2)
2289
2289
2290 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2290 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2291 def debugssl(ui, repo, source=None, **opts):
2291 def debugssl(ui, repo, source=None, **opts):
2292 '''test a secure connection to a server
2292 '''test a secure connection to a server
2293
2293
2294 This builds the certificate chain for the server on Windows, installing the
2294 This builds the certificate chain for the server on Windows, installing the
2295 missing intermediates and trusted root via Windows Update if necessary. It
2295 missing intermediates and trusted root via Windows Update if necessary. It
2296 does nothing on other platforms.
2296 does nothing on other platforms.
2297
2297
2298 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2298 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2299 that server is used. See :hg:`help urls` for more information.
2299 that server is used. See :hg:`help urls` for more information.
2300
2300
2301 If the update succeeds, retry the original operation. Otherwise, the cause
2301 If the update succeeds, retry the original operation. Otherwise, the cause
2302 of the SSL error is likely another issue.
2302 of the SSL error is likely another issue.
2303 '''
2303 '''
2304 if not pycompat.iswindows:
2304 if not pycompat.iswindows:
2305 raise error.Abort(_('certificate chain building is only possible on '
2305 raise error.Abort(_('certificate chain building is only possible on '
2306 'Windows'))
2306 'Windows'))
2307
2307
2308 if not source:
2308 if not source:
2309 if not repo:
2309 if not repo:
2310 raise error.Abort(_("there is no Mercurial repository here, and no "
2310 raise error.Abort(_("there is no Mercurial repository here, and no "
2311 "server specified"))
2311 "server specified"))
2312 source = "default"
2312 source = "default"
2313
2313
2314 source, branches = hg.parseurl(ui.expandpath(source))
2314 source, branches = hg.parseurl(ui.expandpath(source))
2315 url = util.url(source)
2315 url = util.url(source)
2316 addr = None
2316 addr = None
2317
2317
2318 defaultport = {'https': 443, 'ssh': 22}
2318 defaultport = {'https': 443, 'ssh': 22}
2319 if url.scheme in defaultport:
2319 if url.scheme in defaultport:
2320 try:
2320 try:
2321 addr = (url.host, int(url.port or defaultport[url.scheme]))
2321 addr = (url.host, int(url.port or defaultport[url.scheme]))
2322 except ValueError:
2322 except ValueError:
2323 raise error.Abort(_("malformed port number in URL"))
2323 raise error.Abort(_("malformed port number in URL"))
2324 else:
2324 else:
2325 raise error.Abort(_("only https and ssh connections are supported"))
2325 raise error.Abort(_("only https and ssh connections are supported"))
2326
2326
2327 from . import win32
2327 from . import win32
2328
2328
2329 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2329 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2330 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2330 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2331
2331
2332 try:
2332 try:
2333 s.connect(addr)
2333 s.connect(addr)
2334 cert = s.getpeercert(True)
2334 cert = s.getpeercert(True)
2335
2335
2336 ui.status(_('checking the certificate chain for %s\n') % url.host)
2336 ui.status(_('checking the certificate chain for %s\n') % url.host)
2337
2337
2338 complete = win32.checkcertificatechain(cert, build=False)
2338 complete = win32.checkcertificatechain(cert, build=False)
2339
2339
2340 if not complete:
2340 if not complete:
2341 ui.status(_('certificate chain is incomplete, updating... '))
2341 ui.status(_('certificate chain is incomplete, updating... '))
2342
2342
2343 if not win32.checkcertificatechain(cert):
2343 if not win32.checkcertificatechain(cert):
2344 ui.status(_('failed.\n'))
2344 ui.status(_('failed.\n'))
2345 else:
2345 else:
2346 ui.status(_('done.\n'))
2346 ui.status(_('done.\n'))
2347 else:
2347 else:
2348 ui.status(_('full certificate chain is available\n'))
2348 ui.status(_('full certificate chain is available\n'))
2349 finally:
2349 finally:
2350 s.close()
2350 s.close()
2351
2351
2352 @command('debugsub',
2352 @command('debugsub',
2353 [('r', 'rev', '',
2353 [('r', 'rev', '',
2354 _('revision to check'), _('REV'))],
2354 _('revision to check'), _('REV'))],
2355 _('[-r REV] [REV]'))
2355 _('[-r REV] [REV]'))
2356 def debugsub(ui, repo, rev=None):
2356 def debugsub(ui, repo, rev=None):
2357 ctx = scmutil.revsingle(repo, rev, None)
2357 ctx = scmutil.revsingle(repo, rev, None)
2358 for k, v in sorted(ctx.substate.items()):
2358 for k, v in sorted(ctx.substate.items()):
2359 ui.write(('path %s\n') % k)
2359 ui.write(('path %s\n') % k)
2360 ui.write((' source %s\n') % v[0])
2360 ui.write((' source %s\n') % v[0])
2361 ui.write((' revision %s\n') % v[1])
2361 ui.write((' revision %s\n') % v[1])
2362
2362
2363 @command('debugsuccessorssets',
2363 @command('debugsuccessorssets',
2364 [('', 'closest', False, _('return closest successors sets only'))],
2364 [('', 'closest', False, _('return closest successors sets only'))],
2365 _('[REV]'))
2365 _('[REV]'))
2366 def debugsuccessorssets(ui, repo, *revs, **opts):
2366 def debugsuccessorssets(ui, repo, *revs, **opts):
2367 """show set of successors for revision
2367 """show set of successors for revision
2368
2368
2369 A successors set of changeset A is a consistent group of revisions that
2369 A successors set of changeset A is a consistent group of revisions that
2370 succeed A. It contains non-obsolete changesets only unless closests
2370 succeed A. It contains non-obsolete changesets only unless closests
2371 successors set is set.
2371 successors set is set.
2372
2372
2373 In most cases a changeset A has a single successors set containing a single
2373 In most cases a changeset A has a single successors set containing a single
2374 successor (changeset A replaced by A').
2374 successor (changeset A replaced by A').
2375
2375
2376 A changeset that is made obsolete with no successors are called "pruned".
2376 A changeset that is made obsolete with no successors are called "pruned".
2377 Such changesets have no successors sets at all.
2377 Such changesets have no successors sets at all.
2378
2378
2379 A changeset that has been "split" will have a successors set containing
2379 A changeset that has been "split" will have a successors set containing
2380 more than one successor.
2380 more than one successor.
2381
2381
2382 A changeset that has been rewritten in multiple different ways is called
2382 A changeset that has been rewritten in multiple different ways is called
2383 "divergent". Such changesets have multiple successor sets (each of which
2383 "divergent". Such changesets have multiple successor sets (each of which
2384 may also be split, i.e. have multiple successors).
2384 may also be split, i.e. have multiple successors).
2385
2385
2386 Results are displayed as follows::
2386 Results are displayed as follows::
2387
2387
2388 <rev1>
2388 <rev1>
2389 <successors-1A>
2389 <successors-1A>
2390 <rev2>
2390 <rev2>
2391 <successors-2A>
2391 <successors-2A>
2392 <successors-2B1> <successors-2B2> <successors-2B3>
2392 <successors-2B1> <successors-2B2> <successors-2B3>
2393
2393
2394 Here rev2 has two possible (i.e. divergent) successors sets. The first
2394 Here rev2 has two possible (i.e. divergent) successors sets. The first
2395 holds one element, whereas the second holds three (i.e. the changeset has
2395 holds one element, whereas the second holds three (i.e. the changeset has
2396 been split).
2396 been split).
2397 """
2397 """
2398 # passed to successorssets caching computation from one call to another
2398 # passed to successorssets caching computation from one call to another
2399 cache = {}
2399 cache = {}
2400 ctx2str = bytes
2400 ctx2str = bytes
2401 node2str = short
2401 node2str = short
2402 for rev in scmutil.revrange(repo, revs):
2402 for rev in scmutil.revrange(repo, revs):
2403 ctx = repo[rev]
2403 ctx = repo[rev]
2404 ui.write('%s\n'% ctx2str(ctx))
2404 ui.write('%s\n'% ctx2str(ctx))
2405 for succsset in obsutil.successorssets(repo, ctx.node(),
2405 for succsset in obsutil.successorssets(repo, ctx.node(),
2406 closest=opts[r'closest'],
2406 closest=opts[r'closest'],
2407 cache=cache):
2407 cache=cache):
2408 if succsset:
2408 if succsset:
2409 ui.write(' ')
2409 ui.write(' ')
2410 ui.write(node2str(succsset[0]))
2410 ui.write(node2str(succsset[0]))
2411 for node in succsset[1:]:
2411 for node in succsset[1:]:
2412 ui.write(' ')
2412 ui.write(' ')
2413 ui.write(node2str(node))
2413 ui.write(node2str(node))
2414 ui.write('\n')
2414 ui.write('\n')
2415
2415
2416 @command('debugtemplate',
2416 @command('debugtemplate',
2417 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2417 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2418 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2418 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2419 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2419 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2420 optionalrepo=True)
2420 optionalrepo=True)
2421 def debugtemplate(ui, repo, tmpl, **opts):
2421 def debugtemplate(ui, repo, tmpl, **opts):
2422 """parse and apply a template
2422 """parse and apply a template
2423
2423
2424 If -r/--rev is given, the template is processed as a log template and
2424 If -r/--rev is given, the template is processed as a log template and
2425 applied to the given changesets. Otherwise, it is processed as a generic
2425 applied to the given changesets. Otherwise, it is processed as a generic
2426 template.
2426 template.
2427
2427
2428 Use --verbose to print the parsed tree.
2428 Use --verbose to print the parsed tree.
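
For example, rendering a hypothetical template against the working copy
parent, and passing an extra keyword with -D/--define, might look like::

    $ hg debugtemplate -r . '{rev}:{node|short}\n'
    $ hg debugtemplate -D greeting=hello '{greeting} world\n'

Both template strings here are illustrative; any template accepted per
:hg:`help templates` works.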
2429 """
2429 """
2430 revs = None
2430 revs = None
2431 if opts[r'rev']:
2431 if opts[r'rev']:
2432 if repo is None:
2432 if repo is None:
2433 raise error.RepoError(_('there is no Mercurial repository here '
2433 raise error.RepoError(_('there is no Mercurial repository here '
2434 '(.hg not found)'))
2434 '(.hg not found)'))
2435 revs = scmutil.revrange(repo, opts[r'rev'])
2435 revs = scmutil.revrange(repo, opts[r'rev'])
2436
2436
2437 props = {}
2437 props = {}
2438 for d in opts[r'define']:
2438 for d in opts[r'define']:
2439 try:
2439 try:
2440 k, v = (e.strip() for e in d.split('=', 1))
2440 k, v = (e.strip() for e in d.split('=', 1))
2441 if not k or k == 'ui':
2441 if not k or k == 'ui':
2442 raise ValueError
2442 raise ValueError
2443 props[k] = v
2443 props[k] = v
2444 except ValueError:
2444 except ValueError:
2445 raise error.Abort(_('malformed keyword definition: %s') % d)
2445 raise error.Abort(_('malformed keyword definition: %s') % d)
2446
2446
2447 if ui.verbose:
2447 if ui.verbose:
2448 aliases = ui.configitems('templatealias')
2448 aliases = ui.configitems('templatealias')
2449 tree = templater.parse(tmpl)
2449 tree = templater.parse(tmpl)
2450 ui.note(templater.prettyformat(tree), '\n')
2450 ui.note(templater.prettyformat(tree), '\n')
2451 newtree = templater.expandaliases(tree, aliases)
2451 newtree = templater.expandaliases(tree, aliases)
2452 if newtree != tree:
2452 if newtree != tree:
2453 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2453 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2454
2454
2455 if revs is None:
2455 if revs is None:
2456 tres = formatter.templateresources(ui, repo)
2456 tres = formatter.templateresources(ui, repo)
2457 t = formatter.maketemplater(ui, tmpl, resources=tres)
2457 t = formatter.maketemplater(ui, tmpl, resources=tres)
2458 ui.write(t.renderdefault(props))
2458 ui.write(t.renderdefault(props))
2459 else:
2459 else:
2460 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2460 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2461 for r in revs:
2461 for r in revs:
2462 displayer.show(repo[r], **pycompat.strkwargs(props))
2462 displayer.show(repo[r], **pycompat.strkwargs(props))
2463 displayer.close()
2463 displayer.close()
2464
2464
2465 @command('debuguigetpass', [
2465 @command('debuguigetpass', [
2466 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2466 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2467 ], _('[-p TEXT]'), norepo=True)
2467 ], _('[-p TEXT]'), norepo=True)
2468 def debuguigetpass(ui, prompt=''):
2468 def debuguigetpass(ui, prompt=''):
2469 """show prompt to type password"""
2469 """show prompt to type password"""
2470 r = ui.getpass(prompt)
2470 r = ui.getpass(prompt)
ui.write(('response: %s\n') % r)
2472
2472
2473 @command('debuguiprompt', [
2473 @command('debuguiprompt', [
2474 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2474 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2475 ], _('[-p TEXT]'), norepo=True)
2475 ], _('[-p TEXT]'), norepo=True)
2476 def debuguiprompt(ui, prompt=''):
2476 def debuguiprompt(ui, prompt=''):
2477 """show plain prompt"""
2477 """show plain prompt"""
2478 r = ui.prompt(prompt)
2478 r = ui.prompt(prompt)
2479 ui.write(('response: %s\n') % r)
2479 ui.write(('response: %s\n') % r)
2480
2480
2481 @command('debugupdatecaches', [])
2481 @command('debugupdatecaches', [])
2482 def debugupdatecaches(ui, repo, *pats, **opts):
2482 def debugupdatecaches(ui, repo, *pats, **opts):
2483 """warm all known caches in the repository"""
2483 """warm all known caches in the repository"""
2484 with repo.wlock(), repo.lock():
2484 with repo.wlock(), repo.lock():
2485 repo.updatecaches(full=True)
2485 repo.updatecaches(full=True)
2486
2486
2487 @command('debugupgraderepo', [
2487 @command('debugupgraderepo', [
2488 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2488 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2489 ('', 'run', False, _('performs an upgrade')),
2489 ('', 'run', False, _('performs an upgrade')),
2490 ])
2490 ])
2491 def debugupgraderepo(ui, repo, run=False, optimize=None):
2491 def debugupgraderepo(ui, repo, run=False, optimize=None):
2492 """upgrade a repository to use different features
2492 """upgrade a repository to use different features
2493
2493
2494 If no arguments are specified, the repository is evaluated for upgrade
2494 If no arguments are specified, the repository is evaluated for upgrade
2495 and a list of problems and potential optimizations is printed.
2495 and a list of problems and potential optimizations is printed.
2496
2496
2497 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2497 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2498 can be influenced via additional arguments. More details will be provided
2498 can be influenced via additional arguments. More details will be provided
2499 by the command output when run without ``--run``.
2499 by the command output when run without ``--run``.
2500
2500
2501 During the upgrade, the repository will be locked and no writes will be
2501 During the upgrade, the repository will be locked and no writes will be
2502 allowed.
2502 allowed.
2503
2503
2504 At the end of the upgrade, the repository may not be readable while new
2504 At the end of the upgrade, the repository may not be readable while new
2505 repository data is swapped in. This window will be as long as it takes to
2505 repository data is swapped in. This window will be as long as it takes to
2506 rename some directories inside the ``.hg`` directory. On most machines, this
2506 rename some directories inside the ``.hg`` directory. On most machines, this
2507 should complete almost instantaneously and the chances of a consumer being
2507 should complete almost instantaneously and the chances of a consumer being
2508 unable to access the repository should be low.
2508 unable to access the repository should be low.
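
For example, a dry-run evaluation followed by an actual upgrade might
look like::

    $ hg debugupgraderepo
    $ hg debugupgraderepo --run

Any extra ``-o/--optimize`` names should be taken from the dry-run
output; none are assumed here.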
2509 """
2509 """
2510 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2510 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2511
2511
2512 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2512 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2513 inferrepo=True)
2513 inferrepo=True)
2514 def debugwalk(ui, repo, *pats, **opts):
2514 def debugwalk(ui, repo, *pats, **opts):
2515 """show how files match on given patterns"""
2515 """show how files match on given patterns"""
2516 opts = pycompat.byteskwargs(opts)
2516 opts = pycompat.byteskwargs(opts)
2517 m = scmutil.match(repo[None], pats, opts)
2517 m = scmutil.match(repo[None], pats, opts)
2518 ui.write(('matcher: %r\n' % m))
2518 ui.write(('matcher: %r\n' % m))
2519 items = list(repo[None].walk(m))
2519 items = list(repo[None].walk(m))
2520 if not items:
2520 if not items:
2521 return
2521 return
2522 f = lambda fn: fn
2522 f = lambda fn: fn
2523 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2523 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2524 f = lambda fn: util.normpath(fn)
2524 f = lambda fn: util.normpath(fn)
2525 fmt = 'f %%-%ds %%-%ds %%s' % (
2525 fmt = 'f %%-%ds %%-%ds %%s' % (
2526 max([len(abs) for abs in items]),
2526 max([len(abs) for abs in items]),
2527 max([len(m.rel(abs)) for abs in items]))
2527 max([len(m.rel(abs)) for abs in items]))
2528 for abs in items:
2528 for abs in items:
2529 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2529 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2530 ui.write("%s\n" % line.rstrip())
2530 ui.write("%s\n" % line.rstrip())
2531
2531
2532 @command('debugwhyunstable', [], _('REV'))
2532 @command('debugwhyunstable', [], _('REV'))
2533 def debugwhyunstable(ui, repo, rev):
2533 def debugwhyunstable(ui, repo, rev):
2534 """explain instabilities of a changeset"""
2534 """explain instabilities of a changeset"""
2535 for entry in obsutil.whyunstable(repo, repo[rev]):
2535 for entry in obsutil.whyunstable(repo, repo[rev]):
2536 dnodes = ''
2536 dnodes = ''
2537 if entry.get('divergentnodes'):
2537 if entry.get('divergentnodes'):
2538 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2538 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2539 for ctx in entry['divergentnodes']) + ' '
2539 for ctx in entry['divergentnodes']) + ' '
2540 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2540 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2541 entry['reason'], entry['node']))
2541 entry['reason'], entry['node']))
2542
2542
2543 @command('debugwireargs',
2543 @command('debugwireargs',
2544 [('', 'three', '', 'three'),
2544 [('', 'three', '', 'three'),
2545 ('', 'four', '', 'four'),
2545 ('', 'four', '', 'four'),
2546 ('', 'five', '', 'five'),
2546 ('', 'five', '', 'five'),
2547 ] + cmdutil.remoteopts,
2547 ] + cmdutil.remoteopts,
2548 _('REPO [OPTIONS]... [ONE [TWO]]'),
2548 _('REPO [OPTIONS]... [ONE [TWO]]'),
2549 norepo=True)
2549 norepo=True)
2550 def debugwireargs(ui, repopath, *vals, **opts):
2550 def debugwireargs(ui, repopath, *vals, **opts):
2551 opts = pycompat.byteskwargs(opts)
2551 opts = pycompat.byteskwargs(opts)
2552 repo = hg.peer(ui, opts, repopath)
2552 repo = hg.peer(ui, opts, repopath)
2553 for opt in cmdutil.remoteopts:
2553 for opt in cmdutil.remoteopts:
2554 del opts[opt[1]]
2554 del opts[opt[1]]
2555 args = {}
2555 args = {}
2556 for k, v in opts.iteritems():
2556 for k, v in opts.iteritems():
2557 if v:
2557 if v:
2558 args[k] = v
2558 args[k] = v
2559 args = pycompat.strkwargs(args)
2559 args = pycompat.strkwargs(args)
2560 # run twice to check that we don't mess up the stream for the next command
2560 # run twice to check that we don't mess up the stream for the next command
2561 res1 = repo.debugwireargs(*vals, **args)
2561 res1 = repo.debugwireargs(*vals, **args)
2562 res2 = repo.debugwireargs(*vals, **args)
2562 res2 = repo.debugwireargs(*vals, **args)
2563 ui.write("%s\n" % res1)
2563 ui.write("%s\n" % res1)
2564 if res1 != res2:
2564 if res1 != res2:
2565 ui.warn("%s\n" % res2)
2565 ui.warn("%s\n" % res2)
2566
2566
2567 def _parsewirelangblocks(fh):
2567 def _parsewirelangblocks(fh):
2568 activeaction = None
2568 activeaction = None
2569 blocklines = []
2569 blocklines = []
2570
2570
2571 for line in fh:
2571 for line in fh:
2572 line = line.rstrip()
2572 line = line.rstrip()
2573 if not line:
2573 if not line:
2574 continue
2574 continue
2575
2575
2576 if line.startswith(b'#'):
2576 if line.startswith(b'#'):
2577 continue
2577 continue
2578
2578
2579 if not line.startswith(' '):
2579 if not line.startswith(' '):
2580 # New block. Flush previous one.
2580 # New block. Flush previous one.
2581 if activeaction:
2581 if activeaction:
2582 yield activeaction, blocklines
2582 yield activeaction, blocklines
2583
2583
2584 activeaction = line
2584 activeaction = line
2585 blocklines = []
2585 blocklines = []
2586 continue
2586 continue
2587
2587
2588 # Else we start with an indent.
2588 # Else we start with an indent.
2589
2589
2590 if not activeaction:
2590 if not activeaction:
2591 raise error.Abort(_('indented line outside of block'))
2591 raise error.Abort(_('indented line outside of block'))
2592
2592
2593 blocklines.append(line)
2593 blocklines.append(line)
2594
2594
2595 # Flush last block.
2595 # Flush last block.
2596 if activeaction:
2596 if activeaction:
2597 yield activeaction, blocklines
2597 yield activeaction, blocklines
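
# A minimal sketch of what _parsewirelangblocks() yields, assuming this
# hypothetical input arrives on stdin:
#
#   command listkeys
#       namespace bookmarks
#   readavailable
#
# It would produce ('command listkeys', ['    namespace bookmarks'])
# followed by ('readavailable', []); indented lines keep their leading
# spaces because only the right-hand side is stripped.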

@command('debugwireproto',
         [
             ('', 'localssh', False, _('start an SSH server for this repo')),
             ('', 'peer', '', _('construct a specific version of the peer')),
             ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
         ] + cmdutil.remoteopts,
         _('[REPO]'),
         optionalrepo=True)
def debugwireproto(ui, repo, **opts):
    """send wire protocol commands to a server

    This command can be used to issue wire protocol commands to remote
    peers and to debug the raw data being exchanged.

    ``--localssh`` will start an SSH server against the current repository
    and connect to that. By default, the connection will perform a handshake
    and establish an appropriate peer instance.

    ``--peer`` can be used to bypass the handshake protocol and construct a
    peer instance using the specified class type. Valid values are ``raw``,
    ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending raw data
    payloads and don't support higher-level command actions.

    ``--noreadstderr`` can be used to disable automatic reading from stderr
    of the peer (for SSH connections only). Disabling automatic reading of
    stderr is useful for making output more deterministic.
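
    As a purely illustrative invocation (``commands.txt`` is a placeholder
    for a script written in the language described below)::

        $ hg debugwireproto --localssh < commands.txt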

    Commands are issued via a mini language which is specified via stdin.
    The language consists of individual actions to perform. An action is
    defined by a block. A block is defined as a line with no leading
    space followed by 0 or more lines with leading space. Blocks are
    effectively a high-level command with additional metadata.

    Lines beginning with ``#`` are ignored.

    The following sections denote available actions.

    raw
    ---

    Send raw data to the server.

    The block payload contains the raw data to send as one atomic send
    operation. The data may not actually be delivered in a single system
    call: it depends on the abilities of the transport being used.

    Each line in the block is de-indented and concatenated. Then, that
    value is evaluated as a Python b'' literal. This allows the use of
    backslash escaping, etc.
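
    For example (illustrative only), the following block sends the string
    ``hello`` followed by a newline; the b'' literal evaluation is what turns
    the trailing ``\n`` into a real newline byte::

        raw
            hello\n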

    raw+
    ----

    Behaves like ``raw`` except flushes output afterwards.

    command <X>
    -----------

    Send a request to run a named command, whose name follows the ``command``
    string.

    Arguments to the command are defined as lines in this block. The format of
    each line is ``<key> <value>``. e.g.::

        command listkeys
            namespace bookmarks

    Values are interpreted as Python b'' literals. This allows encoding
    special byte sequences via backslash escaping.

    The following arguments have special meaning:

    ``PUSHFILE``
        When defined, the *push* mechanism of the peer will be used instead
        of the static request-response mechanism and the content of the
        file specified in the value of this argument will be sent as the
        command payload.

        This can be used to submit a local bundle file to the remote.

    batchbegin
    ----------

    Instruct the peer to begin a batched send.

    All ``command`` blocks are queued for execution until the next
    ``batchsubmit`` block.

    batchsubmit
    -----------

    Submit previously queued ``command`` blocks as a batch request.

    This action MUST be paired with a ``batchbegin`` action.
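
    For instance, a sketch of a batch of two ``listkeys`` requests (the
    namespaces are only examples)::

        batchbegin
        command listkeys
            namespace bookmarks
        command listkeys
            namespace phases
        batchsubmit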

    close
    -----

    Close the connection to the server.

    flush
    -----

    Flush data written to the server.

    readavailable
    -------------

    Close the write end of the connection and read all available data from
    the server.

    If the connection to the server encompasses multiple pipes, we poll both
    pipes and read available data.

    readline
    --------

    Read a line of output from the server. If there are multiple output
    pipes, reads only the main pipe.

    ereadline
    ---------

    Like ``readline``, but read from the stderr pipe, if available.

    read <X>
    --------

    ``read()`` ``<X>`` bytes from the server's main output pipe.

    eread <X>
    ---------

    ``read()`` ``<X>`` bytes from the server's stderr pipe, if available.
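
    As a small end-to-end sketch (the namespace is only an example), a script
    might issue a single command and then drain whatever remains on the
    pipes::

        command listkeys
            namespace namespaces
        readavailable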
2718 """
2733 """
2719 opts = pycompat.byteskwargs(opts)
2734 opts = pycompat.byteskwargs(opts)
2720
2735
2721 if opts['localssh'] and not repo:
2736 if opts['localssh'] and not repo:
2722 raise error.Abort(_('--localssh requires a repository'))
2737 raise error.Abort(_('--localssh requires a repository'))
2723
2738
2724 if opts['peer'] and opts['peer'] not in ('raw', 'ssh1', 'ssh2'):
2739 if opts['peer'] and opts['peer'] not in ('raw', 'ssh1', 'ssh2'):
2725 raise error.Abort(_('invalid value for --peer'),
2740 raise error.Abort(_('invalid value for --peer'),
2726 hint=_('valid values are "raw", "ssh1", and "ssh2"'))
2741 hint=_('valid values are "raw", "ssh1", and "ssh2"'))
2727
2742
2728 if ui.interactive():
2743 if ui.interactive():
2729 ui.write(_('(waiting for commands on stdin)\n'))
2744 ui.write(_('(waiting for commands on stdin)\n'))
2730
2745
2731 blocks = list(_parsewirelangblocks(ui.fin))
2746 blocks = list(_parsewirelangblocks(ui.fin))
2732
2747
2733 proc = None
2748 proc = None
2734
2749
2735 if opts['localssh']:
2750 if opts['localssh']:
2736 # We start the SSH server in its own process so there is process
2751 # We start the SSH server in its own process so there is process
2737 # separation. This prevents a whole class of potential bugs around
2752 # separation. This prevents a whole class of potential bugs around
2738 # shared state from interfering with server operation.
2753 # shared state from interfering with server operation.
2739 args = util.hgcmd() + [
2754 args = util.hgcmd() + [
2740 '-R', repo.root,
2755 '-R', repo.root,
2741 'debugserve', '--sshstdio',
2756 'debugserve', '--sshstdio',
2742 ]
2757 ]
2743 proc = subprocess.Popen(args, stdin=subprocess.PIPE,
2758 proc = subprocess.Popen(args, stdin=subprocess.PIPE,
2744 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
2759 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
2745 bufsize=0)
2760 bufsize=0)
2746
2761
2747 stdin = proc.stdin
2762 stdin = proc.stdin
2748 stdout = proc.stdout
2763 stdout = proc.stdout
2749 stderr = proc.stderr
2764 stderr = proc.stderr
2750
2765
2751 # We turn the pipes into observers so we can log I/O.
2766 # We turn the pipes into observers so we can log I/O.
2752 if ui.verbose or opts['peer'] == 'raw':
2767 if ui.verbose or opts['peer'] == 'raw':
2753 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
2768 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
2754 logdata=True)
2769 logdata=True)
2755 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
2770 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
2756 logdata=True)
2771 logdata=True)
2757 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
2772 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
2758 logdata=True)
2773 logdata=True)
2759
2774
2760 # --localssh also implies the peer connection settings.
2775 # --localssh also implies the peer connection settings.
2761
2776
2762 url = 'ssh://localserver'
2777 url = 'ssh://localserver'
2763 autoreadstderr = not opts['noreadstderr']
2778 autoreadstderr = not opts['noreadstderr']
2764
2779
2765 if opts['peer'] == 'ssh1':
2780 if opts['peer'] == 'ssh1':
2766 ui.write(_('creating ssh peer for wire protocol version 1\n'))
2781 ui.write(_('creating ssh peer for wire protocol version 1\n'))
2767 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
2782 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
2768 None, autoreadstderr=autoreadstderr)
2783 None, autoreadstderr=autoreadstderr)
2769 elif opts['peer'] == 'ssh2':
2784 elif opts['peer'] == 'ssh2':
2770 ui.write(_('creating ssh peer for wire protocol version 2\n'))
2785 ui.write(_('creating ssh peer for wire protocol version 2\n'))
2771 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
2786 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
2772 None, autoreadstderr=autoreadstderr)
2787 None, autoreadstderr=autoreadstderr)
2773 elif opts['peer'] == 'raw':
2788 elif opts['peer'] == 'raw':
2774 ui.write(_('using raw connection to peer\n'))
2789 ui.write(_('using raw connection to peer\n'))
2775 peer = None
2790 peer = None
2776 else:
2791 else:
2777 ui.write(_('creating ssh peer from handshake results\n'))
2792 ui.write(_('creating ssh peer from handshake results\n'))
2778 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
2793 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
2779 autoreadstderr=autoreadstderr)
2794 autoreadstderr=autoreadstderr)
2780
2795
2781 else:
2796 else:
2782 raise error.Abort(_('only --localssh is currently supported'))
2797 raise error.Abort(_('only --localssh is currently supported'))
2783
2798
2784 batchedcommands = None
2799 batchedcommands = None
2785
2800
2786 # Now perform actions based on the parsed wire language instructions.
2801 # Now perform actions based on the parsed wire language instructions.
2787 for action, lines in blocks:
2802 for action, lines in blocks:
2788 if action in ('raw', 'raw+'):
2803 if action in ('raw', 'raw+'):
2789 # Concatenate the data together.
2804 # Concatenate the data together.
2790 data = ''.join(l.lstrip() for l in lines)
2805 data = ''.join(l.lstrip() for l in lines)
2791 data = util.unescapestr(data)
2806 data = util.unescapestr(data)
2792 stdin.write(data)
2807 stdin.write(data)
2793
2808
2794 if action == 'raw+':
2809 if action == 'raw+':
2795 stdin.flush()
2810 stdin.flush()
2796 elif action == 'flush':
2811 elif action == 'flush':
2797 stdin.flush()
2812 stdin.flush()
2798 elif action.startswith('command'):
2813 elif action.startswith('command'):
2799 if not peer:
2814 if not peer:
2800 raise error.Abort(_('cannot send commands unless peer instance '
2815 raise error.Abort(_('cannot send commands unless peer instance '
2801 'is available'))
2816 'is available'))
2802
2817
2803 command = action.split(' ', 1)[1]
2818 command = action.split(' ', 1)[1]
2804
2819
2805 args = {}
2820 args = {}
2806 for line in lines:
2821 for line in lines:
2807 # We need to allow empty values.
2822 # We need to allow empty values.
2808 fields = line.lstrip().split(' ', 1)
2823 fields = line.lstrip().split(' ', 1)
2809 if len(fields) == 1:
2824 if len(fields) == 1:
2810 key = fields[0]
2825 key = fields[0]
2811 value = ''
2826 value = ''
2812 else:
2827 else:
2813 key, value = fields
2828 key, value = fields
2814
2829
2815 args[key] = util.unescapestr(value)
2830 args[key] = util.unescapestr(value)
2816
2831
2817 if batchedcommands is not None:
2832 if batchedcommands is not None:
2818 batchedcommands.append((command, args))
2833 batchedcommands.append((command, args))
2819 continue
2834 continue
2820
2835
2821 ui.status(_('sending %s command\n') % command)
2836 ui.status(_('sending %s command\n') % command)
2822
2837
2823 if 'PUSHFILE' in args:
2838 if 'PUSHFILE' in args:
2824 with open(args['PUSHFILE'], r'rb') as fh:
2839 with open(args['PUSHFILE'], r'rb') as fh:
2825 del args['PUSHFILE']
2840 del args['PUSHFILE']
2826 res, output = peer._callpush(command, fh,
2841 res, output = peer._callpush(command, fh,
2827 **pycompat.strkwargs(args))
2842 **pycompat.strkwargs(args))
2828 ui.status(_('result: %s\n') % util.escapedata(res))
2843 ui.status(_('result: %s\n') % util.escapedata(res))
2829 ui.status(_('remote output: %s\n') %
2844 ui.status(_('remote output: %s\n') %
2830 util.escapedata(output))
2845 util.escapedata(output))
2831 else:
2846 else:
2832 res = peer._call(command, **pycompat.strkwargs(args))
2847 res = peer._call(command, **pycompat.strkwargs(args))
2833 ui.status(_('response: %s\n') % util.escapedata(res))
2848 ui.status(_('response: %s\n') % util.escapedata(res))
2834
2849
2835 elif action == 'batchbegin':
2850 elif action == 'batchbegin':
2836 if batchedcommands is not None:
2851 if batchedcommands is not None:
2837 raise error.Abort(_('nested batchbegin not allowed'))
2852 raise error.Abort(_('nested batchbegin not allowed'))
2838
2853
2839 batchedcommands = []
2854 batchedcommands = []
2840 elif action == 'batchsubmit':
2855 elif action == 'batchsubmit':
2841 # There is a batching API we could go through. But it would be
2856 # There is a batching API we could go through. But it would be
2842 # difficult to normalize requests into function calls. It is easier
2857 # difficult to normalize requests into function calls. It is easier
2843 # to bypass this layer and normalize to commands + args.
2858 # to bypass this layer and normalize to commands + args.
2844 ui.status(_('sending batch with %d sub-commands\n') %
2859 ui.status(_('sending batch with %d sub-commands\n') %
2845 len(batchedcommands))
2860 len(batchedcommands))
2846 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
2861 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
2847 ui.status(_('response #%d: %s\n') % (i, util.escapedata(chunk)))
2862 ui.status(_('response #%d: %s\n') % (i, util.escapedata(chunk)))
2848
2863
2849 batchedcommands = None
2864 batchedcommands = None
2850 elif action == 'close':
2865 elif action == 'close':
2851 peer.close()
2866 peer.close()
2852 elif action == 'readavailable':
2867 elif action == 'readavailable':
2853 stdin.close()
2868 stdin.close()
2854 stdout.read()
2869 stdout.read()
2855 stderr.read()
2870 stderr.read()
2856 elif action == 'readline':
2871 elif action == 'readline':
2857 stdout.readline()
2872 stdout.readline()
2873 elif action == 'ereadline':
2874 stderr.readline()
2875 elif action.startswith('read '):
2876 count = int(action.split(' ', 1)[1])
2877 stdout.read(count)
2878 elif action.startswith('eread '):
2879 count = int(action.split(' ', 1)[1])
2880 stderr.read(count)
2858 else:
2881 else:
2859 raise error.Abort(_('unknown action: %s') % action)
2882 raise error.Abort(_('unknown action: %s') % action)
2860
2883
2861 if batchedcommands is not None:
2884 if batchedcommands is not None:
2862 raise error.Abort(_('unclosed "batchbegin" request'))
2885 raise error.Abort(_('unclosed "batchbegin" request'))
2863
2886
2864 if peer:
2887 if peer:
2865 peer.close()
2888 peer.close()
2866
2889
2867 if proc:
2890 if proc:
2868 proc.kill()
2891 proc.kill()