debugcommands: fix some %r output with bytestr() wrappers...
Augie Fackler
r36796:7aae39d0 default
# debugcommands.py - command processing for debug* commands
#
# Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import codecs
import collections
import difflib
import errno
import operator
import os
import random
import socket
import ssl
import string
import subprocess
import sys
import tempfile
import time

from .i18n import _
from .node import (
    bin,
    hex,
    nullhex,
    nullid,
    nullrev,
    short,
)
from . import (
    bundle2,
    changegroup,
    cmdutil,
    color,
    context,
    dagparser,
    dagutil,
    encoding,
    error,
    exchange,
    extensions,
    filemerge,
    fileset,
    formatter,
    hg,
    localrepo,
    lock as lockmod,
    logcmdutil,
    merge as mergemod,
    obsolete,
    obsutil,
    phases,
    policy,
    pvec,
    pycompat,
    registrar,
    repair,
    revlog,
    revset,
    revsetlang,
    scmutil,
    setdiscovery,
    simplemerge,
    smartset,
    sshpeer,
    sslutil,
    streamclone,
    templater,
    treediscovery,
    upgrade,
    url as urlmod,
    util,
    vfs as vfsmod,
    wireprotoserver,
)
from .utils import dateutil

release = lockmod.release

command = registrar.command()

@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    if len(args) == 3:
        index, rev1, rev2 = args
        r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
        lookup = r.lookup
    elif len(args) == 2:
        if not repo:
            raise error.Abort(_('there is no Mercurial repository here '
                                '(.hg not found)'))
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_('either two or three arguments required'))
    a = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write('%d:%s\n' % (r.rev(a), hex(a)))
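    # Illustrative usage (the revisions and the revlog path below are
    # hypothetical, not from this file): inside a repository,
    # `hg debugancestor REV1 REV2` resolves both revisions through the
    # changelog, while `hg debugancestor .hg/store/00changelog.i REV1 REV2`
    # opens the given index directly; either form prints the ancestor as
    # "<rev>:<hexnode>", matching the ui.write() format string above.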

@command('debugapplystreamclonebundle', [], 'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    f = hg.openpath(ui, fname)
    gen = exchange.readbundle(ui, f, fname)
    gen.apply(repo)

@command('debugbuilddag',
    [('m', 'mergeable-file', None, _('add single file mergeable changes')),
    ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
    ('n', 'new-file', None, _('add new file at each rev'))],
    _('[OPTION]... [TEXT]'))
def debugbuilddag(ui, repo, text=None,
                  mergeable_file=False,
                  overwritten_file=False,
                  new_file=False):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
         otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """
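    # Illustrative only (this example text is not part of the docstring
    # above): following the element grammar, a description such as
    # "+2:f +3:p2 *f */p2" builds two linear changesets tagging the second
    # one "f", adds three more tagging the last one "p2", forks a new
    # changeset off "f", and finally merges that fork (the default parent,
    # given by the empty backref) with "p2".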

    if text is None:
        ui.status(_("reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_('repository is not empty'))

    # determine number of revs in DAG
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == 'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = ['%d' % i for i in xrange(0, total * linesperrev)]
        initialmergedlines.append("")

    tags = []

    wlock = lock = tr = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        tr = repo.transaction("builddag")

        at = -1
        atbranch = 'default'
        nodeids = []
        id = 0
        ui.progress(_('building'), id, unit=_('revisions'), total=total)
        for type, data in dagparser.parsedag(text):
            if type == 'n':
                ui.note(('node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = "mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [x[fn].data() for x in (pa, p1,
                                                                     p2)]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append("")
                    elif at > 0:
                        ml = p1[fn].data().split("\n")
                    else:
                        ml = initialmergedlines
                    ml[id * linesperrev] += " r%i" % id
                    mergedtext = "\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = "of"
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id

                if new_file:
                    fn = "nf%i" % id
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith("nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(repo, cx, path,
                                                  filecontent[path])
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
                                    date=(id, 0),
                                    user="debugbuilddag",
                                    extra={'branch': atbranch})
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == 'l':
                id, name = data
                ui.note(('tag %s\n' % name))
                tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == 'a':
                ui.note(('branch %s\n' % data))
                atbranch = data
            ui.progress(_('building'), id, unit=_('revisions'), total=total)
        tr.close()

        if tags:
            repo.vfs.write("localtags", "".join(tags))
    finally:
        ui.progress(_('building'), None)
        release(tr, lock, wlock)

def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    indent_string = ' ' * indent
    if all:
        ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
                 % indent_string)

        def showchunks(named):
            ui.write("\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write("%s%s %s %s %s %s %d\n" %
                         (indent_string, hex(node), hex(p1), hex(p2),
                          hex(cs), hex(deltabase), len(delta)))

        chunkdata = gen.changelogheader()
        showchunks("changelog")
        chunkdata = gen.manifestheader()
        showchunks("manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata['filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_('use debugbundle2 for this file'))
        chunkdata = gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write("%s%s\n" % (indent_string, hex(node)))

def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = ' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        msg = "%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = "%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        fm = ui.formatter('debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()

def _debugphaseheads(ui, data, indent=0):
    """display the phase heads contained in 'data'"""
    indent_string = ' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(indent_string)
            ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))

def _quasirepr(thing):
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        return '{%s}' % (
            b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
    return pycompat.bytestr(repr(thing))

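# A minimal sketch of what _quasirepr() above produces (illustrative values,
# not from this file): for a params mapping like {b'version': b'02'} it
# returns something like "{version: 02}", a sorted, hand-built rendering, so
# the bundle2 dumping code below does not depend on Python's repr of dicts
# and bytes.
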
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_('not a bundle2 file'))
    ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
    parttypes = opts.get(r'part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        ui.write('%s -- %s\n' % (part.type, _quasirepr(part.params)))
        if part.type == 'changegroup':
            version = part.params.get('version', '01')
            cg = changegroup.getunbundler(version, part, 'UN')
            _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == 'obsmarkers':
            _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == 'phase-heads':
            _debugphaseheads(ui, part, indent=4)

@command('debugbundle',
        [('a', 'all', None, _('show all details')),
         ('', 'part-type', [], _('show only the named part type')),
         ('', 'spec', None, _('print the bundlespec of the bundle'))],
        _('FILE'),
        norepo=True)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            spec = exchange.getbundlespec(ui, f)
            ui.write('%s\n' % spec)
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)

@command('debugcapabilities',
        [], _('PATH'),
        norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    caps = peer.capabilities()
    ui.write(('Main capabilities:\n'))
    for c in sorted(caps):
        ui.write((' %s\n') % c)
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.write(('Bundle2 capabilities:\n'))
        for key, values in sorted(b2caps.iteritems()):
            ui.write((' %s\n') % key)
            for v in values:
                ui.write((' %s\n') % v)

@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        # use a separate name so the 'error' module is not shadowed here
        errstr = _(".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)

@command('debugcolor',
        [('', 'style', None, _('show all configured styles'))],
        'hg debugcolor')
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.write(('color mode: %s\n') % ui._colormode)
    if opts.get(r'style'):
        return _debugdisplaystyle(ui)
    else:
        return _debugdisplaycolor(ui)

def _debugdisplaycolor(ui):
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for k, v in ui.configitems('color'):
            if k.startswith('color.'):
                ui._styles[k] = k[6:]
            elif k.startswith('terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_('available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(),
                   key=lambda i: ('_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(('%s\n') % colorname, label=label)

def _debugdisplaystyle(ui):
    ui.write(_('available style:\n'))
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write('%s' % label, label=label)
        if effects:
            # 50
            ui.write(': ')
            ui.write(' ' * (max(0, width - len(label))))
            ui.write(', '.join(ui.label(e, e) for e in effects.split()))
        ui.write('\n')

@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(_('(warning: stream clone bundle will contain secret '
                  'revisions)\n'))

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))

@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
    ('b', 'branches', None, _('annotate with branch names')),
    ('', 'dots', None, _('use dots for runs')),
    ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get(r'spaces')
    dots = opts.get(r'dots')
    if file_:
        rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                             file_)
        revs = set((int(r) for r in revs))
        def events():
            for r in rlog:
                yield 'n', (r, list(p for p in rlog.parentrevs(r)
                                    if p != -1))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        cl = repo.changelog
        tags = opts.get(r'tags')
        branches = opts.get(r'branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            b = "default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(p for p in cl.parentrevs(r)
                                    if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
        ui.write("\n")

@command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
    try:
        ui.write(r.revision(r.lookup(rev), raw=True))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)

@command('debugdate',
    [('e', 'extended', None, _('try extended date formats'))],
    _('[-e] DATE [RANGE]'),
    norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts[r"extended"]:
        d = dateutil.parsedate(date, util.extendeddateformats)
    else:
        d = dateutil.parsedate(date)
    ui.write(("internal: %d %d\n") % d)
    ui.write(("standard: %s\n") % dateutil.datestr(d))
    if range:
        m = dateutil.matchdate(range)
        ui.write(("match: %s\n") % m(d[0]))

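# Illustrative usage of debugdate() above (the example date is chosen here,
# not taken from this file): `hg debugdate '2006-09-18 15:13:13'` parses the
# argument with dateutil.parsedate() and prints the internal
# "(unixtime, timezone-offset)" pair on the "internal:" line and its
# dateutil.datestr() rendering on the "standard:" line, per the ui.write()
# calls above.
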
@command('debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
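    # Illustrative invocation (not part of the original file): the keywords
    # documented above can be fed to the formatter, e.g.
    #   hg debugdeltachain -m -T '{rev} {chainid} {chainlen} {deltatype}\n'
    # where -m comes from debugrevlogopts (select the manifest revlog) and
    # -T from formatteropts, both included in this command's option table.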
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            if e[3] == e[5]:
                deltatype = 'p1'
            elif e[3] == e[6]:
                deltatype = 'p2'
            elif e[3] == rev - 1:
                deltatype = 'prev'
            elif e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            if e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    fm.plain(' rev chain# chainlen prev delta '
             'size rawsize chainsize ratio lindist extradist '
             'extraratio')
    if withsparseread:
        fm.plain(' readsize largestblk rddensity srchunks')
    fm.plain('\n')

    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        start = r.start
        length = r.length
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        chainratio = float(chainsize) / float(uncomp)
        extraratio = float(extradist) / float(chainsize)

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in revlog._slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            readdensity = float(chainsize) / float(readsize)

            fm.write('readsize largestblock readdensity srchunks',
                     ' %10d %10d %9.5f %8d',
                     readsize, largestblock, readdensity, srchunks,
                     readsize=readsize, largestblock=largestblock,
                     readdensity=readdensity, srchunks=srchunks)

        fm.plain('\n')

    fm.end()

@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime')),
    ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = opts.get(r'nodates')
    datesort = opts.get(r'datesort')

    timestr = ""
    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        if ent[3] == -1:
            timestr = 'unset '
        elif nodates:
            timestr = 'set '
        else:
            timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
                                    time.localtime(ent[3]))
            timestr = encoding.strtolocal(timestr)
        if ent[1] & 0o20000:
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))

@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
    ('', 'nonheads', None,
     _('use old-style discovery with non-heads included')),
    ('', 'rev', [], 'restrict discovery to this set of revs'),
    ] + cmdutil.remoteopts,
    _('[--rev REV] [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(pushedrevs, remoteheads, remote=remote):
        if opts.get('old'):
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))
                dag = dagutil.revlogdag(repo.changelog)
                all = dag.ancestorset(dag.internalizeall(common))
                common = dag.externalizeall(dag.headsetofconnecteds(all))
        else:
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
                                                            ancestorsof=nodes)
        common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
        if lheads <= common:
            ui.write(("local is subset\n"))
        elif rheads <= common:
            ui.write(("remote is subset\n"))

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts['rev']
    doit(localrevs, remoterevs)

_chunksize = 4 << 10

@command('debugdownload',
    [
        ('o', 'output', '', _('path')),
    ],
    optionalrepo=True)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config
    """
    fh = urlmod.open(ui, url, output)

    dest = ui
    if output:
        dest = open(output, "wb", _chunksize)
    try:
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        if output:
            dest.close()

823 @command('debugextensions', cmdutil.formatteropts, [], norepo=True)
823 @command('debugextensions', cmdutil.formatteropts, [], norepo=True)
824 def debugextensions(ui, **opts):
824 def debugextensions(ui, **opts):
825 '''show information about active extensions'''
825 '''show information about active extensions'''
826 opts = pycompat.byteskwargs(opts)
826 opts = pycompat.byteskwargs(opts)
827 exts = extensions.extensions(ui)
827 exts = extensions.extensions(ui)
828 hgver = util.version()
828 hgver = util.version()
829 fm = ui.formatter('debugextensions', opts)
829 fm = ui.formatter('debugextensions', opts)
830 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
830 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
831 isinternal = extensions.ismoduleinternal(extmod)
831 isinternal = extensions.ismoduleinternal(extmod)
832 extsource = pycompat.fsencode(extmod.__file__)
832 extsource = pycompat.fsencode(extmod.__file__)
833 if isinternal:
833 if isinternal:
834 exttestedwith = [] # never expose magic string to users
834 exttestedwith = [] # never expose magic string to users
835 else:
835 else:
836 exttestedwith = getattr(extmod, 'testedwith', '').split()
836 exttestedwith = getattr(extmod, 'testedwith', '').split()
837 extbuglink = getattr(extmod, 'buglink', None)
837 extbuglink = getattr(extmod, 'buglink', None)
838
838
839 fm.startitem()
839 fm.startitem()
840
840
841 if ui.quiet or ui.verbose:
841 if ui.quiet or ui.verbose:
842 fm.write('name', '%s\n', extname)
842 fm.write('name', '%s\n', extname)
843 else:
843 else:
844 fm.write('name', '%s', extname)
844 fm.write('name', '%s', extname)
845 if isinternal or hgver in exttestedwith:
845 if isinternal or hgver in exttestedwith:
846 fm.plain('\n')
846 fm.plain('\n')
847 elif not exttestedwith:
847 elif not exttestedwith:
848 fm.plain(_(' (untested!)\n'))
848 fm.plain(_(' (untested!)\n'))
849 else:
849 else:
850 lasttestedversion = exttestedwith[-1]
850 lasttestedversion = exttestedwith[-1]
851 fm.plain(' (%s!)\n' % lasttestedversion)
851 fm.plain(' (%s!)\n' % lasttestedversion)
852
852
853 fm.condwrite(ui.verbose and extsource, 'source',
853 fm.condwrite(ui.verbose and extsource, 'source',
854 _(' location: %s\n'), extsource or "")
854 _(' location: %s\n'), extsource or "")
855
855
856 if ui.verbose:
856 if ui.verbose:
857 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
857 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
858 fm.data(bundled=isinternal)
858 fm.data(bundled=isinternal)
859
859
860 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
860 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
861 _(' tested with: %s\n'),
861 _(' tested with: %s\n'),
862 fm.formatlist(exttestedwith, name='ver'))
862 fm.formatlist(exttestedwith, name='ver'))
863
863
864 fm.condwrite(ui.verbose and extbuglink, 'buglink',
864 fm.condwrite(ui.verbose and extbuglink, 'buglink',
865 _(' bug reporting: %s\n'), extbuglink or "")
865 _(' bug reporting: %s\n'), extbuglink or "")
866
866
867 fm.end()
867 fm.end()
868
868
869 @command('debugfileset',
869 @command('debugfileset',
870 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
870 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
871 _('[-r REV] FILESPEC'))
871 _('[-r REV] FILESPEC'))
872 def debugfileset(ui, repo, expr, **opts):
872 def debugfileset(ui, repo, expr, **opts):
873 '''parse and apply a fileset specification'''
873 '''parse and apply a fileset specification'''
874 ctx = scmutil.revsingle(repo, opts.get(r'rev'), None)
874 ctx = scmutil.revsingle(repo, opts.get(r'rev'), None)
875 if ui.verbose:
875 if ui.verbose:
876 tree = fileset.parse(expr)
876 tree = fileset.parse(expr)
877 ui.note(fileset.prettyformat(tree), "\n")
877 ui.note(fileset.prettyformat(tree), "\n")
878
878
879 for f in ctx.getfileset(expr):
879 for f in ctx.getfileset(expr):
880 ui.write("%s\n" % f)
880 ui.write("%s\n" % f)
881
881
882 @command('debugformat',
882 @command('debugformat',
883 [] + cmdutil.formatteropts,
883 [] + cmdutil.formatteropts,
884 _(''))
884 _(''))
885 def debugformat(ui, repo, **opts):
885 def debugformat(ui, repo, **opts):
886 """display format information about the current repository
886 """display format information about the current repository
887
887
888 Use --verbose to get extra information about current config value and
888 Use --verbose to get extra information about current config value and
889 Mercurial default."""
889 Mercurial default."""
890 opts = pycompat.byteskwargs(opts)
890 opts = pycompat.byteskwargs(opts)
891 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
891 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
892 maxvariantlength = max(len('format-variant'), maxvariantlength)
892 maxvariantlength = max(len('format-variant'), maxvariantlength)
893
893
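    # makeformatname pads each variant label so the "repo" (and, with
    # --verbose, "config"/"default") columns start at the same offset
    # regardless of the variant name's length.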
894 def makeformatname(name):
894 def makeformatname(name):
895 return '%s:' + (' ' * (maxvariantlength - len(name)))
895 return '%s:' + (' ' * (maxvariantlength - len(name)))
896
896
897 fm = ui.formatter('debugformat', opts)
897 fm = ui.formatter('debugformat', opts)
898 if fm.isplain():
898 if fm.isplain():
899 def formatvalue(value):
899 def formatvalue(value):
900 if util.safehasattr(value, 'startswith'):
900 if util.safehasattr(value, 'startswith'):
901 return value
901 return value
902 if value:
902 if value:
903 return 'yes'
903 return 'yes'
904 else:
904 else:
905 return 'no'
905 return 'no'
906 else:
906 else:
907 formatvalue = pycompat.identity
907 formatvalue = pycompat.identity
908
908
909 fm.plain('format-variant')
909 fm.plain('format-variant')
910 fm.plain(' ' * (maxvariantlength - len('format-variant')))
910 fm.plain(' ' * (maxvariantlength - len('format-variant')))
911 fm.plain(' repo')
911 fm.plain(' repo')
912 if ui.verbose:
912 if ui.verbose:
913 fm.plain(' config default')
913 fm.plain(' config default')
914 fm.plain('\n')
914 fm.plain('\n')
915 for fv in upgrade.allformatvariant:
915 for fv in upgrade.allformatvariant:
916 fm.startitem()
916 fm.startitem()
917 repovalue = fv.fromrepo(repo)
917 repovalue = fv.fromrepo(repo)
918 configvalue = fv.fromconfig(repo)
918 configvalue = fv.fromconfig(repo)
919
919
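        # Pick output labels: flag variants whose repository value disagrees
        # with the current configuration or with Mercurial's built-in default,
        # so mismatches stand out.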
920 if repovalue != configvalue:
920 if repovalue != configvalue:
921 namelabel = 'formatvariant.name.mismatchconfig'
921 namelabel = 'formatvariant.name.mismatchconfig'
922 repolabel = 'formatvariant.repo.mismatchconfig'
922 repolabel = 'formatvariant.repo.mismatchconfig'
923 elif repovalue != fv.default:
923 elif repovalue != fv.default:
924 namelabel = 'formatvariant.name.mismatchdefault'
924 namelabel = 'formatvariant.name.mismatchdefault'
925 repolabel = 'formatvariant.repo.mismatchdefault'
925 repolabel = 'formatvariant.repo.mismatchdefault'
926 else:
926 else:
927 namelabel = 'formatvariant.name.uptodate'
927 namelabel = 'formatvariant.name.uptodate'
928 repolabel = 'formatvariant.repo.uptodate'
928 repolabel = 'formatvariant.repo.uptodate'
929
929
930 fm.write('name', makeformatname(fv.name), fv.name,
930 fm.write('name', makeformatname(fv.name), fv.name,
931 label=namelabel)
931 label=namelabel)
932 fm.write('repo', ' %3s', formatvalue(repovalue),
932 fm.write('repo', ' %3s', formatvalue(repovalue),
933 label=repolabel)
933 label=repolabel)
934 if fv.default != configvalue:
934 if fv.default != configvalue:
935 configlabel = 'formatvariant.config.special'
935 configlabel = 'formatvariant.config.special'
936 else:
936 else:
937 configlabel = 'formatvariant.config.default'
937 configlabel = 'formatvariant.config.default'
938 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
938 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
939 label=configlabel)
939 label=configlabel)
940 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
940 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
941 label='formatvariant.default')
941 label='formatvariant.default')
942 fm.plain('\n')
942 fm.plain('\n')
943 fm.end()
943 fm.end()
944
944
945 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
945 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
946 def debugfsinfo(ui, path="."):
946 def debugfsinfo(ui, path="."):
947 """show information detected about current filesystem"""
947 """show information detected about current filesystem"""
948 ui.write(('path: %s\n') % path)
948 ui.write(('path: %s\n') % path)
949 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
949 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
950 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
950 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
951 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
951 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
952 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
952 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
953 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
953 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
954 casesensitive = '(unknown)'
954 casesensitive = '(unknown)'
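    # Case sensitivity is probed by creating a temporary file in the target
    # path; if that fails (for instance on a read-only filesystem), the value
    # stays '(unknown)'.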
955 try:
955 try:
956 with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
956 with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
957 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
957 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
958 except OSError:
958 except OSError:
959 pass
959 pass
960 ui.write(('case-sensitive: %s\n') % casesensitive)
960 ui.write(('case-sensitive: %s\n') % casesensitive)
961
961
962 @command('debuggetbundle',
962 @command('debuggetbundle',
963 [('H', 'head', [], _('id of head node'), _('ID')),
963 [('H', 'head', [], _('id of head node'), _('ID')),
964 ('C', 'common', [], _('id of common node'), _('ID')),
964 ('C', 'common', [], _('id of common node'), _('ID')),
965 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
965 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
966 _('REPO FILE [-H|-C ID]...'),
966 _('REPO FILE [-H|-C ID]...'),
967 norepo=True)
967 norepo=True)
968 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
968 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
969 """retrieves a bundle from a repo
969 """retrieves a bundle from a repo
970
970
971 Every ID must be a full-length hex node id string. Saves the bundle to the
971 Every ID must be a full-length hex node id string. Saves the bundle to the
972 given file.
972 given file.
973 """
973 """
974 opts = pycompat.byteskwargs(opts)
974 opts = pycompat.byteskwargs(opts)
975 repo = hg.peer(ui, opts, repopath)
975 repo = hg.peer(ui, opts, repopath)
976 if not repo.capable('getbundle'):
976 if not repo.capable('getbundle'):
977 raise error.Abort("getbundle() not supported by target repository")
977 raise error.Abort("getbundle() not supported by target repository")
978 args = {}
978 args = {}
979 if common:
979 if common:
980 args[r'common'] = [bin(s) for s in common]
980 args[r'common'] = [bin(s) for s in common]
981 if head:
981 if head:
982 args[r'heads'] = [bin(s) for s in head]
982 args[r'heads'] = [bin(s) for s in head]
983 # TODO: get desired bundlecaps from command line.
983 # TODO: get desired bundlecaps from command line.
984 args[r'bundlecaps'] = None
984 args[r'bundlecaps'] = None
985 bundle = repo.getbundle('debug', **args)
985 bundle = repo.getbundle('debug', **args)
986
986
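    # Map the user-facing --type names onto bundle format identifiers: the
    # HG10* values are legacy bundle1 containers with the given compression,
    # while 'bundle2' selects an HG20 container.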
987 bundletype = opts.get('type', 'bzip2').lower()
987 bundletype = opts.get('type', 'bzip2').lower()
988 btypes = {'none': 'HG10UN',
988 btypes = {'none': 'HG10UN',
989 'bzip2': 'HG10BZ',
989 'bzip2': 'HG10BZ',
990 'gzip': 'HG10GZ',
990 'gzip': 'HG10GZ',
991 'bundle2': 'HG20'}
991 'bundle2': 'HG20'}
992 bundletype = btypes.get(bundletype)
992 bundletype = btypes.get(bundletype)
993 if bundletype not in bundle2.bundletypes:
993 if bundletype not in bundle2.bundletypes:
994 raise error.Abort(_('unknown bundle type specified with --type'))
994 raise error.Abort(_('unknown bundle type specified with --type'))
995 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
995 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
996
996
997 @command('debugignore', [], '[FILE]')
997 @command('debugignore', [], '[FILE]')
998 def debugignore(ui, repo, *files, **opts):
998 def debugignore(ui, repo, *files, **opts):
999 """display the combined ignore pattern and information about ignored files
999 """display the combined ignore pattern and information about ignored files
1000
1000
1001 With no argument, displays the combined ignore pattern.
1001 With no argument, displays the combined ignore pattern.
1002
1002
1003 Given space-separated file names, shows if the given file is ignored and
1003 Given space-separated file names, shows if the given file is ignored and
1004 if so, shows the ignore rule (file and line number) that matched it.
1004 if so, shows the ignore rule (file and line number) that matched it.
1005 """
1005 """
1006 ignore = repo.dirstate._ignore
1006 ignore = repo.dirstate._ignore
1007 if not files:
1007 if not files:
1008 # Show all the patterns
1008 # Show all the patterns
1009 ui.write("%s\n" % pycompat.byterepr(ignore))
1009 ui.write("%s\n" % pycompat.byterepr(ignore))
1010 else:
1010 else:
1011 m = scmutil.match(repo[None], pats=files)
1011 m = scmutil.match(repo[None], pats=files)
1012 for f in m.files():
1012 for f in m.files():
1013 nf = util.normpath(f)
1013 nf = util.normpath(f)
1014 ignored = None
1014 ignored = None
1015 ignoredata = None
1015 ignoredata = None
1016 if nf != '.':
1016 if nf != '.':
1017 if ignore(nf):
1017 if ignore(nf):
1018 ignored = nf
1018 ignored = nf
1019 ignoredata = repo.dirstate._ignorefileandline(nf)
1019 ignoredata = repo.dirstate._ignorefileandline(nf)
1020 else:
1020 else:
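                # The file is not matched directly; walk its parent
                # directories to find which one the ignore pattern matched.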
1021 for p in util.finddirs(nf):
1021 for p in util.finddirs(nf):
1022 if ignore(p):
1022 if ignore(p):
1023 ignored = p
1023 ignored = p
1024 ignoredata = repo.dirstate._ignorefileandline(p)
1024 ignoredata = repo.dirstate._ignorefileandline(p)
1025 break
1025 break
1026 if ignored:
1026 if ignored:
1027 if ignored == nf:
1027 if ignored == nf:
1028 ui.write(_("%s is ignored\n") % m.uipath(f))
1028 ui.write(_("%s is ignored\n") % m.uipath(f))
1029 else:
1029 else:
1030 ui.write(_("%s is ignored because of "
1030 ui.write(_("%s is ignored because of "
1031 "containing folder %s\n")
1031 "containing folder %s\n")
1032 % (m.uipath(f), ignored))
1032 % (m.uipath(f), ignored))
1033 ignorefile, lineno, line = ignoredata
1033 ignorefile, lineno, line = ignoredata
1034 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1034 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1035 % (ignorefile, lineno, line))
1035 % (ignorefile, lineno, line))
1036 else:
1036 else:
1037 ui.write(_("%s is not ignored\n") % m.uipath(f))
1037 ui.write(_("%s is not ignored\n") % m.uipath(f))
1038
1038
1039 @command('debugindex', cmdutil.debugrevlogopts +
1039 @command('debugindex', cmdutil.debugrevlogopts +
1040 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
1040 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
1041 _('[-f FORMAT] -c|-m|FILE'),
1041 _('[-f FORMAT] -c|-m|FILE'),
1042 optionalrepo=True)
1042 optionalrepo=True)
1043 def debugindex(ui, repo, file_=None, **opts):
1043 def debugindex(ui, repo, file_=None, **opts):
1044 """dump the contents of an index file"""
1044 """dump the contents of an index file"""
1045 opts = pycompat.byteskwargs(opts)
1045 opts = pycompat.byteskwargs(opts)
1046 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
1046 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
1047 format = opts.get('format', 0)
1047 format = opts.get('format', 0)
1048 if format not in (0, 1):
1048 if format not in (0, 1):
1049 raise error.Abort(_("unknown format %d") % format)
1049 raise error.Abort(_("unknown format %d") % format)
1050
1050
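    # For generaldelta revlogs the base column reports each revision's delta
    # parent; otherwise it reports the start of the delta chain.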
1051 generaldelta = r.version & revlog.FLAG_GENERALDELTA
1051 generaldelta = r.version & revlog.FLAG_GENERALDELTA
1052 if generaldelta:
1052 if generaldelta:
1053 basehdr = ' delta'
1053 basehdr = ' delta'
1054 else:
1054 else:
1055 basehdr = ' base'
1055 basehdr = ' base'
1056
1056
1057 if ui.debugflag:
1057 if ui.debugflag:
1058 shortfn = hex
1058 shortfn = hex
1059 else:
1059 else:
1060 shortfn = short
1060 shortfn = short
1061
1061
1062 # There might not be anything in r, so have a sane default
1062 # There might not be anything in r, so have a sane default
1063 idlen = 12
1063 idlen = 12
1064 for i in r:
1064 for i in r:
1065 idlen = len(shortfn(r.node(i)))
1065 idlen = len(shortfn(r.node(i)))
1066 break
1066 break
1067
1067
1068 if format == 0:
1068 if format == 0:
1069 ui.write((" rev offset length " + basehdr + " linkrev"
1069 ui.write((" rev offset length " + basehdr + " linkrev"
1070 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
1070 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
1071 elif format == 1:
1071 elif format == 1:
1072 ui.write((" rev flag offset length"
1072 ui.write((" rev flag offset length"
1073 " size " + basehdr + " link p1 p2"
1073 " size " + basehdr + " link p1 p2"
1074 " %s\n") % "nodeid".rjust(idlen))
1074 " %s\n") % "nodeid".rjust(idlen))
1075
1075
1076 for i in r:
1076 for i in r:
1077 node = r.node(i)
1077 node = r.node(i)
1078 if generaldelta:
1078 if generaldelta:
1079 base = r.deltaparent(i)
1079 base = r.deltaparent(i)
1080 else:
1080 else:
1081 base = r.chainbase(i)
1081 base = r.chainbase(i)
1082 if format == 0:
1082 if format == 0:
1083 try:
1083 try:
1084 pp = r.parents(node)
1084 pp = r.parents(node)
1085 except Exception:
1085 except Exception:
1086 pp = [nullid, nullid]
1086 pp = [nullid, nullid]
1087 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1087 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1088 i, r.start(i), r.length(i), base, r.linkrev(i),
1088 i, r.start(i), r.length(i), base, r.linkrev(i),
1089 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
1089 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
1090 elif format == 1:
1090 elif format == 1:
1091 pr = r.parentrevs(i)
1091 pr = r.parentrevs(i)
1092 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
1092 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
1093 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
1093 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
1094 base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
1094 base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
1095
1095
1096 @command('debugindexdot', cmdutil.debugrevlogopts,
1096 @command('debugindexdot', cmdutil.debugrevlogopts,
1097 _('-c|-m|FILE'), optionalrepo=True)
1097 _('-c|-m|FILE'), optionalrepo=True)
1098 def debugindexdot(ui, repo, file_=None, **opts):
1098 def debugindexdot(ui, repo, file_=None, **opts):
1099 """dump an index DAG as a graphviz dot file"""
1099 """dump an index DAG as a graphviz dot file"""
1100 opts = pycompat.byteskwargs(opts)
1100 opts = pycompat.byteskwargs(opts)
1101 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
1101 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
1102 ui.write(("digraph G {\n"))
1102 ui.write(("digraph G {\n"))
1103 for i in r:
1103 for i in r:
1104 node = r.node(i)
1104 node = r.node(i)
1105 pp = r.parents(node)
1105 pp = r.parents(node)
1106 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1106 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1107 if pp[1] != nullid:
1107 if pp[1] != nullid:
1108 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1108 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1109 ui.write("}\n")
1109 ui.write("}\n")
1110
1110
1111 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1111 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1112 def debuginstall(ui, **opts):
1112 def debuginstall(ui, **opts):
1113 '''test Mercurial installation
1113 '''test Mercurial installation
1114
1114
1115 Returns 0 on success.
1115 Returns 0 on success.
1116 '''
1116 '''
1117 opts = pycompat.byteskwargs(opts)
1117 opts = pycompat.byteskwargs(opts)
1118
1118
1119 def writetemp(contents):
1119 def writetemp(contents):
1120 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
1120 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
1121 f = os.fdopen(fd, pycompat.sysstr("wb"))
1121 f = os.fdopen(fd, pycompat.sysstr("wb"))
1122 f.write(contents)
1122 f.write(contents)
1123 f.close()
1123 f.close()
1124 return name
1124 return name
1125
1125
1126 problems = 0
1126 problems = 0
1127
1127
1128 fm = ui.formatter('debuginstall', opts)
1128 fm = ui.formatter('debuginstall', opts)
1129 fm.startitem()
1129 fm.startitem()
1130
1130
1131 # encoding
1131 # encoding
1132 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1132 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1133 err = None
1133 err = None
1134 try:
1134 try:
1135 codecs.lookup(pycompat.sysstr(encoding.encoding))
1135 codecs.lookup(pycompat.sysstr(encoding.encoding))
1136 except LookupError as inst:
1136 except LookupError as inst:
1137 err = util.forcebytestr(inst)
1137 err = util.forcebytestr(inst)
1138 problems += 1
1138 problems += 1
1139 fm.condwrite(err, 'encodingerror', _(" %s\n"
1139 fm.condwrite(err, 'encodingerror', _(" %s\n"
1140 " (check that your locale is properly set)\n"), err)
1140 " (check that your locale is properly set)\n"), err)
1141
1141
1142 # Python
1142 # Python
1143 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1143 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1144 pycompat.sysexecutable)
1144 pycompat.sysexecutable)
1145 fm.write('pythonver', _("checking Python version (%s)\n"),
1145 fm.write('pythonver', _("checking Python version (%s)\n"),
1146 ("%d.%d.%d" % sys.version_info[:3]))
1146 ("%d.%d.%d" % sys.version_info[:3]))
1147 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1147 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1148 os.path.dirname(pycompat.fsencode(os.__file__)))
1148 os.path.dirname(pycompat.fsencode(os.__file__)))
1149
1149
1150 security = set(sslutil.supportedprotocols)
1150 security = set(sslutil.supportedprotocols)
1151 if sslutil.hassni:
1151 if sslutil.hassni:
1152 security.add('sni')
1152 security.add('sni')
1153
1153
1154 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1154 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1155 fm.formatlist(sorted(security), name='protocol',
1155 fm.formatlist(sorted(security), name='protocol',
1156 fmt='%s', sep=','))
1156 fmt='%s', sep=','))
1157
1157
1158 # These are warnings, not errors. So don't increment problem count. This
1158 # These are warnings, not errors. So don't increment problem count. This
1159 # may change in the future.
1159 # may change in the future.
1160 if 'tls1.2' not in security:
1160 if 'tls1.2' not in security:
1161 fm.plain(_(' TLS 1.2 not supported by Python install; '
1161 fm.plain(_(' TLS 1.2 not supported by Python install; '
1162 'network connections lack modern security\n'))
1162 'network connections lack modern security\n'))
1163 if 'sni' not in security:
1163 if 'sni' not in security:
1164 fm.plain(_(' SNI not supported by Python install; may have '
1164 fm.plain(_(' SNI not supported by Python install; may have '
1165 'connectivity issues with some servers\n'))
1165 'connectivity issues with some servers\n'))
1166
1166
1167 # TODO print CA cert info
1167 # TODO print CA cert info
1168
1168
1169 # hg version
1169 # hg version
1170 hgver = util.version()
1170 hgver = util.version()
1171 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1171 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1172 hgver.split('+')[0])
1172 hgver.split('+')[0])
1173 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1173 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1174 '+'.join(hgver.split('+')[1:]))
1174 '+'.join(hgver.split('+')[1:]))
1175
1175
1176 # compiled modules
1176 # compiled modules
1177 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1177 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1178 policy.policy)
1178 policy.policy)
1179 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1179 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1180 os.path.dirname(pycompat.fsencode(__file__)))
1180 os.path.dirname(pycompat.fsencode(__file__)))
1181
1181
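    # Only try importing the C extension modules when the module policy allows
    # them; a pure-Python installation legitimately skips this check.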
1182 if policy.policy in ('c', 'allow'):
1182 if policy.policy in ('c', 'allow'):
1183 err = None
1183 err = None
1184 try:
1184 try:
1185 from .cext import (
1185 from .cext import (
1186 base85,
1186 base85,
1187 bdiff,
1187 bdiff,
1188 mpatch,
1188 mpatch,
1189 osutil,
1189 osutil,
1190 )
1190 )
1191 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1191 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1192 except Exception as inst:
1192 except Exception as inst:
1193 err = util.forcebytestr(inst)
1193 err = util.forcebytestr(inst)
1194 problems += 1
1194 problems += 1
1195 fm.condwrite(err, 'extensionserror', " %s\n", err)
1195 fm.condwrite(err, 'extensionserror', " %s\n", err)
1196
1196
1197 compengines = util.compengines._engines.values()
1197 compengines = util.compengines._engines.values()
1198 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1198 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1199 fm.formatlist(sorted(e.name() for e in compengines),
1199 fm.formatlist(sorted(e.name() for e in compengines),
1200 name='compengine', fmt='%s', sep=', '))
1200 name='compengine', fmt='%s', sep=', '))
1201 fm.write('compenginesavail', _('checking available compression engines '
1201 fm.write('compenginesavail', _('checking available compression engines '
1202 '(%s)\n'),
1202 '(%s)\n'),
1203 fm.formatlist(sorted(e.name() for e in compengines
1203 fm.formatlist(sorted(e.name() for e in compengines
1204 if e.available()),
1204 if e.available()),
1205 name='compengine', fmt='%s', sep=', '))
1205 name='compengine', fmt='%s', sep=', '))
1206 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1206 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1207 fm.write('compenginesserver', _('checking available compression engines '
1207 fm.write('compenginesserver', _('checking available compression engines '
1208 'for wire protocol (%s)\n'),
1208 'for wire protocol (%s)\n'),
1209 fm.formatlist([e.name() for e in wirecompengines
1209 fm.formatlist([e.name() for e in wirecompengines
1210 if e.wireprotosupport()],
1210 if e.wireprotosupport()],
1211 name='compengine', fmt='%s', sep=', '))
1211 name='compengine', fmt='%s', sep=', '))
1212 re2 = 'missing'
1212 re2 = 'missing'
1213 if util._re2:
1213 if util._re2:
1214 re2 = 'available'
1214 re2 = 'available'
1215 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1215 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1216 fm.data(re2=bool(util._re2))
1216 fm.data(re2=bool(util._re2))
1217
1217
1218 # templates
1218 # templates
1219 p = templater.templatepaths()
1219 p = templater.templatepaths()
1220 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1220 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1221 fm.condwrite(not p, '', _(" no template directories found\n"))
1221 fm.condwrite(not p, '', _(" no template directories found\n"))
1222 if p:
1222 if p:
1223 m = templater.templatepath("map-cmdline.default")
1223 m = templater.templatepath("map-cmdline.default")
1224 if m:
1224 if m:
1225 # template found, check if it is working
1225 # template found, check if it is working
1226 err = None
1226 err = None
1227 try:
1227 try:
1228 templater.templater.frommapfile(m)
1228 templater.templater.frommapfile(m)
1229 except Exception as inst:
1229 except Exception as inst:
1230 err = util.forcebytestr(inst)
1230 err = util.forcebytestr(inst)
1231 p = None
1231 p = None
1232 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1232 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1233 else:
1233 else:
1234 p = None
1234 p = None
1235 fm.condwrite(p, 'defaulttemplate',
1235 fm.condwrite(p, 'defaulttemplate',
1236 _("checking default template (%s)\n"), m)
1236 _("checking default template (%s)\n"), m)
1237 fm.condwrite(not m, 'defaulttemplatenotfound',
1237 fm.condwrite(not m, 'defaulttemplatenotfound',
1238 _(" template '%s' not found\n"), "default")
1238 _(" template '%s' not found\n"), "default")
1239 if not p:
1239 if not p:
1240 problems += 1
1240 problems += 1
1241 fm.condwrite(not p, '',
1241 fm.condwrite(not p, '',
1242 _(" (templates seem to have been installed incorrectly)\n"))
1242 _(" (templates seem to have been installed incorrectly)\n"))
1243
1243
1244 # editor
1244 # editor
1245 editor = ui.geteditor()
1245 editor = ui.geteditor()
1246 editor = util.expandpath(editor)
1246 editor = util.expandpath(editor)
1247 editorbin = util.shellsplit(editor)[0]
1247 editorbin = util.shellsplit(editor)[0]
1248 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1248 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1249 cmdpath = util.findexe(editorbin)
1249 cmdpath = util.findexe(editorbin)
1250 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1250 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1251 _(" No commit editor set and can't find %s in PATH\n"
1251 _(" No commit editor set and can't find %s in PATH\n"
1252 " (specify a commit editor in your configuration"
1252 " (specify a commit editor in your configuration"
1253 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1253 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1254 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1254 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1255 _(" Can't find editor '%s' in PATH\n"
1255 _(" Can't find editor '%s' in PATH\n"
1256 " (specify a commit editor in your configuration"
1256 " (specify a commit editor in your configuration"
1257 " file)\n"), not cmdpath and editorbin)
1257 " file)\n"), not cmdpath and editorbin)
1258 if not cmdpath and editor != 'vi':
1258 if not cmdpath and editor != 'vi':
1259 problems += 1
1259 problems += 1
1260
1260
1261 # check username
1261 # check username
1262 username = None
1262 username = None
1263 err = None
1263 err = None
1264 try:
1264 try:
1265 username = ui.username()
1265 username = ui.username()
1266 except error.Abort as e:
1266 except error.Abort as e:
1267 err = util.forcebytestr(e)
1267 err = util.forcebytestr(e)
1268 problems += 1
1268 problems += 1
1269
1269
1270 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1270 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1271 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1271 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1272 " (specify a username in your configuration file)\n"), err)
1272 " (specify a username in your configuration file)\n"), err)
1273
1273
1274 fm.condwrite(not problems, '',
1274 fm.condwrite(not problems, '',
1275 _("no problems detected\n"))
1275 _("no problems detected\n"))
1276 if not problems:
1276 if not problems:
1277 fm.data(problems=problems)
1277 fm.data(problems=problems)
1278 fm.condwrite(problems, 'problems',
1278 fm.condwrite(problems, 'problems',
1279 _("%d problems detected,"
1279 _("%d problems detected,"
1280 " please check your install!\n"), problems)
1280 " please check your install!\n"), problems)
1281 fm.end()
1281 fm.end()
1282
1282
1283 return problems
1283 return problems
1284
1284
1285 @command('debugknown', [], _('REPO ID...'), norepo=True)
1285 @command('debugknown', [], _('REPO ID...'), norepo=True)
1286 def debugknown(ui, repopath, *ids, **opts):
1286 def debugknown(ui, repopath, *ids, **opts):
1287 """test whether node ids are known to a repo
1287 """test whether node ids are known to a repo
1288
1288
1289 Every ID must be a full-length hex node id string. Returns a list of 0s
1289 Every ID must be a full-length hex node id string. Returns a list of 0s
1290 and 1s indicating unknown/known.
1290 and 1s indicating unknown/known.
1291 """
1291 """
1292 opts = pycompat.byteskwargs(opts)
1292 opts = pycompat.byteskwargs(opts)
1293 repo = hg.peer(ui, opts, repopath)
1293 repo = hg.peer(ui, opts, repopath)
1294 if not repo.capable('known'):
1294 if not repo.capable('known'):
1295 raise error.Abort("known() not supported by target repository")
1295 raise error.Abort("known() not supported by target repository")
1296 flags = repo.known([bin(s) for s in ids])
1296 flags = repo.known([bin(s) for s in ids])
1297 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1297 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1298
1298
1299 @command('debuglabelcomplete', [], _('LABEL...'))
1299 @command('debuglabelcomplete', [], _('LABEL...'))
1300 def debuglabelcomplete(ui, repo, *args):
1300 def debuglabelcomplete(ui, repo, *args):
1301 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1301 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1302 debugnamecomplete(ui, repo, *args)
1302 debugnamecomplete(ui, repo, *args)
1303
1303
1304 @command('debuglocks',
1304 @command('debuglocks',
1305 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1305 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1306 ('W', 'force-wlock', None,
1306 ('W', 'force-wlock', None,
1307 _('free the working state lock (DANGEROUS)')),
1307 _('free the working state lock (DANGEROUS)')),
1308 ('s', 'set-lock', None, _('set the store lock until stopped')),
1308 ('s', 'set-lock', None, _('set the store lock until stopped')),
1309 ('S', 'set-wlock', None,
1309 ('S', 'set-wlock', None,
1310 _('set the working state lock until stopped'))],
1310 _('set the working state lock until stopped'))],
1311 _('[OPTION]...'))
1311 _('[OPTION]...'))
1312 def debuglocks(ui, repo, **opts):
1312 def debuglocks(ui, repo, **opts):
1313 """show or modify state of locks
1313 """show or modify state of locks
1314
1314
1315 By default, this command will show which locks are held. This
1315 By default, this command will show which locks are held. This
1316 includes the user and process holding the lock, the amount of time
1316 includes the user and process holding the lock, the amount of time
1317 the lock has been held, and the machine name where the process is
1317 the lock has been held, and the machine name where the process is
1318 running if it's not local.
1318 running if it's not local.
1319
1319
1320 Locks protect the integrity of Mercurial's data, so they should be
1320 Locks protect the integrity of Mercurial's data, so they should be
1321 treated with care. System crashes or other interruptions may cause
1321 treated with care. System crashes or other interruptions may cause
1322 locks to not be properly released, though Mercurial will usually
1322 locks to not be properly released, though Mercurial will usually
1323 detect and remove such stale locks automatically.
1323 detect and remove such stale locks automatically.
1324
1324
1325 However, detecting stale locks may not always be possible (for
1325 However, detecting stale locks may not always be possible (for
1326 instance, on a shared filesystem). Removing locks may also be
1326 instance, on a shared filesystem). Removing locks may also be
1327 blocked by filesystem permissions.
1327 blocked by filesystem permissions.
1328
1328
1329 Setting a lock will prevent other commands from changing the data.
1329 Setting a lock will prevent other commands from changing the data.
1330 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1330 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1331 The set locks are removed when the command exits.
1331 The set locks are removed when the command exits.
1332
1332
1333 Returns 0 if no locks are held.
1333 Returns 0 if no locks are held.
1334
1334
1335 """
1335 """
1336
1336
1337 if opts.get(r'force_lock'):
1337 if opts.get(r'force_lock'):
1338 repo.svfs.unlink('lock')
1338 repo.svfs.unlink('lock')
1339 if opts.get(r'force_wlock'):
1339 if opts.get(r'force_wlock'):
1340 repo.vfs.unlink('wlock')
1340 repo.vfs.unlink('wlock')
1341 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1341 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1342 return 0
1342 return 0
1343
1343
1344 locks = []
1344 locks = []
1345 try:
1345 try:
1346 if opts.get(r'set_wlock'):
1346 if opts.get(r'set_wlock'):
1347 try:
1347 try:
1348 locks.append(repo.wlock(False))
1348 locks.append(repo.wlock(False))
1349 except error.LockHeld:
1349 except error.LockHeld:
1350 raise error.Abort(_('wlock is already held'))
1350 raise error.Abort(_('wlock is already held'))
1351 if opts.get(r'set_lock'):
1351 if opts.get(r'set_lock'):
1352 try:
1352 try:
1353 locks.append(repo.lock(False))
1353 locks.append(repo.lock(False))
1354 except error.LockHeld:
1354 except error.LockHeld:
1355 raise error.Abort(_('lock is already held'))
1355 raise error.Abort(_('lock is already held'))
1356 if len(locks):
1356 if len(locks):
1357 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1357 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1358 return 0
1358 return 0
1359 finally:
1359 finally:
1360 release(*locks)
1360 release(*locks)
1361
1361
1362 now = time.time()
1362 now = time.time()
1363 held = 0
1363 held = 0
1364
1364
1365 def report(vfs, name, method):
1365 def report(vfs, name, method):
1366 # this causes stale locks to get reaped for more accurate reporting
1366 # this causes stale locks to get reaped for more accurate reporting
1367 try:
1367 try:
1368 l = method(False)
1368 l = method(False)
1369 except error.LockHeld:
1369 except error.LockHeld:
1370 l = None
1370 l = None
1371
1371
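        # If the lock could be acquired it was free, so release it again;
        # otherwise inspect the lock file to report who holds it and for how
        # long.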
1372 if l:
1372 if l:
1373 l.release()
1373 l.release()
1374 else:
1374 else:
1375 try:
1375 try:
1376 stat = vfs.lstat(name)
1376 stat = vfs.lstat(name)
1377 age = now - stat.st_mtime
1377 age = now - stat.st_mtime
1378 user = util.username(stat.st_uid)
1378 user = util.username(stat.st_uid)
1379 locker = vfs.readlock(name)
1379 locker = vfs.readlock(name)
1380 if ":" in locker:
1380 if ":" in locker:
1381 host, pid = locker.split(':')
1381 host, pid = locker.split(':')
1382 if host == socket.gethostname():
1382 if host == socket.gethostname():
1383 locker = 'user %s, process %s' % (user, pid)
1383 locker = 'user %s, process %s' % (user, pid)
1384 else:
1384 else:
1385 locker = 'user %s, process %s, host %s' \
1385 locker = 'user %s, process %s, host %s' \
1386 % (user, pid, host)
1386 % (user, pid, host)
1387 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1387 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1388 return 1
1388 return 1
1389 except OSError as e:
1389 except OSError as e:
1390 if e.errno != errno.ENOENT:
1390 if e.errno != errno.ENOENT:
1391 raise
1391 raise
1392
1392
1393 ui.write(("%-6s free\n") % (name + ":"))
1393 ui.write(("%-6s free\n") % (name + ":"))
1394 return 0
1394 return 0
1395
1395
1396 held += report(repo.svfs, "lock", repo.lock)
1396 held += report(repo.svfs, "lock", repo.lock)
1397 held += report(repo.vfs, "wlock", repo.wlock)
1397 held += report(repo.vfs, "wlock", repo.wlock)
1398
1398
1399 return held
1399 return held
1400
1400
1401 @command('debugmergestate', [], '')
1401 @command('debugmergestate', [], '')
1402 def debugmergestate(ui, repo, *args):
1402 def debugmergestate(ui, repo, *args):
1403 """print merge state
1403 """print merge state
1404
1404
1405 Use --verbose to print out information about whether v1 or v2 merge state
1405 Use --verbose to print out information about whether v1 or v2 merge state
1406 was chosen."""
1406 was chosen."""
1407 def _hashornull(h):
1407 def _hashornull(h):
1408 if h == nullhex:
1408 if h == nullhex:
1409 return 'null'
1409 return 'null'
1410 else:
1410 else:
1411 return h
1411 return h
1412
1412
1413 def printrecords(version):
1413 def printrecords(version):
1414 ui.write(('* version %d records\n') % version)
1414 ui.write(('* version %d records\n') % version)
1415 if version == 1:
1415 if version == 1:
1416 records = v1records
1416 records = v1records
1417 else:
1417 else:
1418 records = v2records
1418 records = v2records
1419
1419
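        # Record types handled below: 'L'/'O' are the local/other changeset
        # ids, 'm' is the merge driver state, 'F'/'D'/'C' are per-file merge
        # records, 'f' carries per-file extras and 'l' the merge labels;
        # anything else is dumped raw.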
1420 for rtype, record in records:
1420 for rtype, record in records:
1421 # pretty print some record types
1421 # pretty print some record types
1422 if rtype == 'L':
1422 if rtype == 'L':
1423 ui.write(('local: %s\n') % record)
1423 ui.write(('local: %s\n') % record)
1424 elif rtype == 'O':
1424 elif rtype == 'O':
1425 ui.write(('other: %s\n') % record)
1425 ui.write(('other: %s\n') % record)
1426 elif rtype == 'm':
1426 elif rtype == 'm':
1427 driver, mdstate = record.split('\0', 1)
1427 driver, mdstate = record.split('\0', 1)
1428 ui.write(('merge driver: %s (state "%s")\n')
1428 ui.write(('merge driver: %s (state "%s")\n')
1429 % (driver, mdstate))
1429 % (driver, mdstate))
1430 elif rtype in 'FDC':
1430 elif rtype in 'FDC':
1431 r = record.split('\0')
1431 r = record.split('\0')
1432 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1432 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1433 if version == 1:
1433 if version == 1:
1434 onode = 'not stored in v1 format'
1434 onode = 'not stored in v1 format'
1435 flags = r[7]
1435 flags = r[7]
1436 else:
1436 else:
1437 onode, flags = r[7:9]
1437 onode, flags = r[7:9]
1438 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1438 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1439 % (f, rtype, state, _hashornull(hash)))
1439 % (f, rtype, state, _hashornull(hash)))
1440 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1440 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1441 ui.write((' ancestor path: %s (node %s)\n')
1441 ui.write((' ancestor path: %s (node %s)\n')
1442 % (afile, _hashornull(anode)))
1442 % (afile, _hashornull(anode)))
1443 ui.write((' other path: %s (node %s)\n')
1443 ui.write((' other path: %s (node %s)\n')
1444 % (ofile, _hashornull(onode)))
1444 % (ofile, _hashornull(onode)))
1445 elif rtype == 'f':
1445 elif rtype == 'f':
1446 filename, rawextras = record.split('\0', 1)
1446 filename, rawextras = record.split('\0', 1)
1447 extras = rawextras.split('\0')
1447 extras = rawextras.split('\0')
1448 i = 0
1448 i = 0
1449 extrastrings = []
1449 extrastrings = []
1450 while i < len(extras):
1450 while i < len(extras):
1451 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1451 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1452 i += 2
1452 i += 2
1453
1453
1454 ui.write(('file extras: %s (%s)\n')
1454 ui.write(('file extras: %s (%s)\n')
1455 % (filename, ', '.join(extrastrings)))
1455 % (filename, ', '.join(extrastrings)))
1456 elif rtype == 'l':
1456 elif rtype == 'l':
1457 labels = record.split('\0', 2)
1457 labels = record.split('\0', 2)
1458 labels = [l for l in labels if len(l) > 0]
1458 labels = [l for l in labels if len(l) > 0]
1459 ui.write(('labels:\n'))
1459 ui.write(('labels:\n'))
1460 ui.write((' local: %s\n' % labels[0]))
1460 ui.write((' local: %s\n' % labels[0]))
1461 ui.write((' other: %s\n' % labels[1]))
1461 ui.write((' other: %s\n' % labels[1]))
1462 if len(labels) > 2:
1462 if len(labels) > 2:
1463 ui.write((' base: %s\n' % labels[2]))
1463 ui.write((' base: %s\n' % labels[2]))
1464 else:
1464 else:
1465 ui.write(('unrecognized entry: %s\t%s\n')
1465 ui.write(('unrecognized entry: %s\t%s\n')
1466 % (rtype, record.replace('\0', '\t')))
1466 % (rtype, record.replace('\0', '\t')))
1467
1467
1468 # Avoid mergestate.read() since it may raise an exception for unsupported
1468 # Avoid mergestate.read() since it may raise an exception for unsupported
1469 # merge state records. We shouldn't be doing this, but this is OK since this
1469 # merge state records. We shouldn't be doing this, but this is OK since this
1470 # command is pretty low-level.
1470 # command is pretty low-level.
1471 ms = mergemod.mergestate(repo)
1471 ms = mergemod.mergestate(repo)
1472
1472
1473 # sort so that reasonable information is on top
1473 # sort so that reasonable information is on top
1474 v1records = ms._readrecordsv1()
1474 v1records = ms._readrecordsv1()
1475 v2records = ms._readrecordsv2()
1475 v2records = ms._readrecordsv2()
1476 order = 'LOml'
1476 order = 'LOml'
1477 def key(r):
1477 def key(r):
1478 idx = order.find(r[0])
1478 idx = order.find(r[0])
1479 if idx == -1:
1479 if idx == -1:
1480 return (1, r[1])
1480 return (1, r[1])
1481 else:
1481 else:
1482 return (0, idx)
1482 return (0, idx)
1483 v1records.sort(key=key)
1483 v1records.sort(key=key)
1484 v2records.sort(key=key)
1484 v2records.sort(key=key)
1485
1485
1486 if not v1records and not v2records:
1486 if not v1records and not v2records:
1487 ui.write(('no merge state found\n'))
1487 ui.write(('no merge state found\n'))
1488 elif not v2records:
1488 elif not v2records:
1489 ui.note(('no version 2 merge state\n'))
1489 ui.note(('no version 2 merge state\n'))
1490 printrecords(1)
1490 printrecords(1)
1491 elif ms._v1v2match(v1records, v2records):
1491 elif ms._v1v2match(v1records, v2records):
1492 ui.note(('v1 and v2 states match: using v2\n'))
1492 ui.note(('v1 and v2 states match: using v2\n'))
1493 printrecords(2)
1493 printrecords(2)
1494 else:
1494 else:
1495 ui.note(('v1 and v2 states mismatch: using v1\n'))
1495 ui.note(('v1 and v2 states mismatch: using v1\n'))
1496 printrecords(1)
1496 printrecords(1)
1497 if ui.verbose:
1497 if ui.verbose:
1498 printrecords(2)
1498 printrecords(2)
1499
1499
1500 @command('debugnamecomplete', [], _('NAME...'))
1500 @command('debugnamecomplete', [], _('NAME...'))
1501 def debugnamecomplete(ui, repo, *args):
1501 def debugnamecomplete(ui, repo, *args):
1502 '''complete "names" - tags, open branch names, bookmark names'''
1502 '''complete "names" - tags, open branch names, bookmark names'''
1503
1503
1504 names = set()
1504 names = set()
1505 # since we previously only listed open branches, we will handle that
1505 # since we previously only listed open branches, we will handle that
1506 # specially (after this for loop)
1506 # specially (after this for loop)
1507 for name, ns in repo.names.iteritems():
1507 for name, ns in repo.names.iteritems():
1508 if name != 'branches':
1508 if name != 'branches':
1509 names.update(ns.listnames(repo))
1509 names.update(ns.listnames(repo))
1510 names.update(tag for (tag, heads, tip, closed)
1510 names.update(tag for (tag, heads, tip, closed)
1511 in repo.branchmap().iterbranches() if not closed)
1511 in repo.branchmap().iterbranches() if not closed)
1512 completions = set()
1512 completions = set()
1513 if not args:
1513 if not args:
1514 args = ['']
1514 args = ['']
1515 for a in args:
1515 for a in args:
1516 completions.update(n for n in names if n.startswith(a))
1516 completions.update(n for n in names if n.startswith(a))
1517 ui.write('\n'.join(sorted(completions)))
1517 ui.write('\n'.join(sorted(completions)))
1518 ui.write('\n')
1518 ui.write('\n')
1519
1519
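# Example (node ids are placeholders): "hg debugobsolete <old-node> <new-node>"
# records a marker obsoleting the first changeset in favour of the second;
# "hg debugobsolete" with no arguments lists the existing markers.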
1520 @command('debugobsolete',
1520 @command('debugobsolete',
1521 [('', 'flags', 0, _('markers flag')),
1521 [('', 'flags', 0, _('markers flag')),
1522 ('', 'record-parents', False,
1522 ('', 'record-parents', False,
1523 _('record parent information for the precursor')),
1523 _('record parent information for the precursor')),
1524 ('r', 'rev', [], _('display markers relevant to REV')),
1524 ('r', 'rev', [], _('display markers relevant to REV')),
1525 ('', 'exclusive', False, _('restrict display to markers only '
1525 ('', 'exclusive', False, _('restrict display to markers only '
1526 'relevant to REV')),
1526 'relevant to REV')),
1527 ('', 'index', False, _('display index of the marker')),
1527 ('', 'index', False, _('display index of the marker')),
1528 ('', 'delete', [], _('delete markers specified by indices')),
1528 ('', 'delete', [], _('delete markers specified by indices')),
1529 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1529 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1530 _('[OBSOLETED [REPLACEMENT ...]]'))
1530 _('[OBSOLETED [REPLACEMENT ...]]'))
1531 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1531 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1532 """create arbitrary obsolete marker
1532 """create arbitrary obsolete marker
1533
1533
1534 With no arguments, displays the list of obsolescence markers."""
1534 With no arguments, displays the list of obsolescence markers."""
1535
1535
1536 opts = pycompat.byteskwargs(opts)
1536 opts = pycompat.byteskwargs(opts)
1537
1537
1538 def parsenodeid(s):
1538 def parsenodeid(s):
1539 try:
1539 try:
1540 # We do not use revsingle/revrange functions here to accept
1540 # We do not use revsingle/revrange functions here to accept
1541 # arbitrary node identifiers, possibly not present in the
1541 # arbitrary node identifiers, possibly not present in the
1542 # local repository.
1542 # local repository.
1543 n = bin(s)
1543 n = bin(s)
1544 if len(n) != len(nullid):
1544 if len(n) != len(nullid):
1545 raise TypeError()
1545 raise TypeError()
1546 return n
1546 return n
1547 except TypeError:
1547 except TypeError:
1548 raise error.Abort('changeset references must be full hexadecimal '
1548 raise error.Abort('changeset references must be full hexadecimal '
1549 'node identifiers')
1549 'node identifiers')
1550
1550
1551 if opts.get('delete'):
1551 if opts.get('delete'):
1552 indices = []
1552 indices = []
1553 for v in opts.get('delete'):
1553 for v in opts.get('delete'):
1554 try:
1554 try:
1555 indices.append(int(v))
1555 indices.append(int(v))
1556 except ValueError:
1556 except ValueError:
1557 raise error.Abort(_('invalid index value: %r') % v,
1557 raise error.Abort(_('invalid index value: %r') % v,
1558 hint=_('use integers for indices'))
1558 hint=_('use integers for indices'))
1559
1559
1560 if repo.currenttransaction():
1560 if repo.currenttransaction():
1561 raise error.Abort(_('cannot delete obsmarkers in the middle '
1561 raise error.Abort(_('cannot delete obsmarkers in the middle '
1562                                'of a transaction.'))
1562                                'of a transaction.'))
1563
1563
1564 with repo.lock():
1564 with repo.lock():
1565 n = repair.deleteobsmarkers(repo.obsstore, indices)
1565 n = repair.deleteobsmarkers(repo.obsstore, indices)
1566 ui.write(_('deleted %i obsolescence markers\n') % n)
1566 ui.write(_('deleted %i obsolescence markers\n') % n)
1567
1567
1568 return
1568 return
1569
1569
1570 if precursor is not None:
1570 if precursor is not None:
1571 if opts['rev']:
1571 if opts['rev']:
1572 raise error.Abort('cannot select revision when creating marker')
1572 raise error.Abort('cannot select revision when creating marker')
1573 metadata = {}
1573 metadata = {}
1574 metadata['user'] = opts['user'] or ui.username()
1574 metadata['user'] = opts['user'] or ui.username()
1575 succs = tuple(parsenodeid(succ) for succ in successors)
1575 succs = tuple(parsenodeid(succ) for succ in successors)
1576 l = repo.lock()
1576 l = repo.lock()
1577 try:
1577 try:
1578 tr = repo.transaction('debugobsolete')
1578 tr = repo.transaction('debugobsolete')
1579 try:
1579 try:
1580 date = opts.get('date')
1580 date = opts.get('date')
1581 if date:
1581 if date:
1582 date = dateutil.parsedate(date)
1582 date = dateutil.parsedate(date)
1583 else:
1583 else:
1584 date = None
1584 date = None
1585 prec = parsenodeid(precursor)
1585 prec = parsenodeid(precursor)
1586 parents = None
1586 parents = None
1587 if opts['record_parents']:
1587 if opts['record_parents']:
1588 if prec not in repo.unfiltered():
1588 if prec not in repo.unfiltered():
1589                     raise error.Abort('cannot use --record-parents on '
1589                     raise error.Abort('cannot use --record-parents on '
1590 'unknown changesets')
1590 'unknown changesets')
1591 parents = repo.unfiltered()[prec].parents()
1591 parents = repo.unfiltered()[prec].parents()
1592 parents = tuple(p.node() for p in parents)
1592 parents = tuple(p.node() for p in parents)
1593 repo.obsstore.create(tr, prec, succs, opts['flags'],
1593 repo.obsstore.create(tr, prec, succs, opts['flags'],
1594 parents=parents, date=date,
1594 parents=parents, date=date,
1595 metadata=metadata, ui=ui)
1595 metadata=metadata, ui=ui)
1596 tr.close()
1596 tr.close()
1597 except ValueError as exc:
1597 except ValueError as exc:
1598 raise error.Abort(_('bad obsmarker input: %s') %
1598 raise error.Abort(_('bad obsmarker input: %s') %
1599 pycompat.bytestr(exc))
1599 pycompat.bytestr(exc))
1600 finally:
1600 finally:
1601 tr.release()
1601 tr.release()
1602 finally:
1602 finally:
1603 l.release()
1603 l.release()
1604 else:
1604 else:
1605 if opts['rev']:
1605 if opts['rev']:
1606 revs = scmutil.revrange(repo, opts['rev'])
1606 revs = scmutil.revrange(repo, opts['rev'])
1607 nodes = [repo[r].node() for r in revs]
1607 nodes = [repo[r].node() for r in revs]
1608 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1608 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1609 exclusive=opts['exclusive']))
1609 exclusive=opts['exclusive']))
1610 markers.sort(key=lambda x: x._data)
1610 markers.sort(key=lambda x: x._data)
1611 else:
1611 else:
1612 markers = obsutil.getmarkers(repo)
1612 markers = obsutil.getmarkers(repo)
1613
1613
1614 markerstoiter = markers
1614 markerstoiter = markers
1615 isrelevant = lambda m: True
1615 isrelevant = lambda m: True
1616 if opts.get('rev') and opts.get('index'):
1616 if opts.get('rev') and opts.get('index'):
1617 markerstoiter = obsutil.getmarkers(repo)
1617 markerstoiter = obsutil.getmarkers(repo)
1618 markerset = set(markers)
1618 markerset = set(markers)
1619 isrelevant = lambda m: m in markerset
1619 isrelevant = lambda m: m in markerset
1620
1620
1621 fm = ui.formatter('debugobsolete', opts)
1621 fm = ui.formatter('debugobsolete', opts)
1622 for i, m in enumerate(markerstoiter):
1622 for i, m in enumerate(markerstoiter):
1623 if not isrelevant(m):
1623 if not isrelevant(m):
1624 # marker can be irrelevant when we're iterating over a set
1624 # marker can be irrelevant when we're iterating over a set
1625 # of markers (markerstoiter) which is bigger than the set
1625 # of markers (markerstoiter) which is bigger than the set
1626 # of markers we want to display (markers)
1626 # of markers we want to display (markers)
1627 # this can happen if both --index and --rev options are
1627 # this can happen if both --index and --rev options are
1628 # provided and thus we need to iterate over all of the markers
1628 # provided and thus we need to iterate over all of the markers
1629 # to get the correct indices, but only display the ones that
1629 # to get the correct indices, but only display the ones that
1630 # are relevant to --rev value
1630 # are relevant to --rev value
1631 continue
1631 continue
1632 fm.startitem()
1632 fm.startitem()
1633 ind = i if opts.get('index') else None
1633 ind = i if opts.get('index') else None
1634 cmdutil.showmarker(fm, m, index=ind)
1634 cmdutil.showmarker(fm, m, index=ind)
1635 fm.end()
1635 fm.end()
1636
1636
1637 @command('debugpathcomplete',
1637 @command('debugpathcomplete',
1638 [('f', 'full', None, _('complete an entire path')),
1638 [('f', 'full', None, _('complete an entire path')),
1639 ('n', 'normal', None, _('show only normal files')),
1639 ('n', 'normal', None, _('show only normal files')),
1640 ('a', 'added', None, _('show only added files')),
1640 ('a', 'added', None, _('show only added files')),
1641 ('r', 'removed', None, _('show only removed files'))],
1641 ('r', 'removed', None, _('show only removed files'))],
1642 _('FILESPEC...'))
1642 _('FILESPEC...'))
1643 def debugpathcomplete(ui, repo, *specs, **opts):
1643 def debugpathcomplete(ui, repo, *specs, **opts):
1644 '''complete part or all of a tracked path
1644 '''complete part or all of a tracked path
1645
1645
1646 This command supports shells that offer path name completion. It
1646 This command supports shells that offer path name completion. It
1647 currently completes only files already known to the dirstate.
1647 currently completes only files already known to the dirstate.
1648
1648
1649 Completion extends only to the next path segment unless
1649 Completion extends only to the next path segment unless
1650 --full is specified, in which case entire paths are used.'''
1650 --full is specified, in which case entire paths are used.'''
1651
1651
1652 def complete(path, acceptable):
1652 def complete(path, acceptable):
1653 dirstate = repo.dirstate
1653 dirstate = repo.dirstate
1654 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1654 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1655 rootdir = repo.root + pycompat.ossep
1655 rootdir = repo.root + pycompat.ossep
1656 if spec != repo.root and not spec.startswith(rootdir):
1656 if spec != repo.root and not spec.startswith(rootdir):
1657 return [], []
1657 return [], []
1658 if os.path.isdir(spec):
1658 if os.path.isdir(spec):
1659 spec += '/'
1659 spec += '/'
1660 spec = spec[len(rootdir):]
1660 spec = spec[len(rootdir):]
1661 fixpaths = pycompat.ossep != '/'
1661 fixpaths = pycompat.ossep != '/'
1662 if fixpaths:
1662 if fixpaths:
1663 spec = spec.replace(pycompat.ossep, '/')
1663 spec = spec.replace(pycompat.ossep, '/')
1664 speclen = len(spec)
1664 speclen = len(spec)
1665 fullpaths = opts[r'full']
1665 fullpaths = opts[r'full']
1666 files, dirs = set(), set()
1666 files, dirs = set(), set()
1667 adddir, addfile = dirs.add, files.add
1667 adddir, addfile = dirs.add, files.add
1668 for f, st in dirstate.iteritems():
1668 for f, st in dirstate.iteritems():
1669 if f.startswith(spec) and st[0] in acceptable:
1669 if f.startswith(spec) and st[0] in acceptable:
1670 if fixpaths:
1670 if fixpaths:
1671 f = f.replace('/', pycompat.ossep)
1671 f = f.replace('/', pycompat.ossep)
1672 if fullpaths:
1672 if fullpaths:
1673 addfile(f)
1673 addfile(f)
1674 continue
1674 continue
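            # Without --full, completion stops at the next path separator
            # after the typed prefix, yielding one directory segment at a
            # time.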
1675 s = f.find(pycompat.ossep, speclen)
1675 s = f.find(pycompat.ossep, speclen)
1676 if s >= 0:
1676 if s >= 0:
1677 adddir(f[:s])
1677 adddir(f[:s])
1678 else:
1678 else:
1679 addfile(f)
1679 addfile(f)
1680 return files, dirs
1680 return files, dirs
1681
1681
1682 acceptable = ''
1682 acceptable = ''
1683 if opts[r'normal']:
1683 if opts[r'normal']:
1684 acceptable += 'nm'
1684 acceptable += 'nm'
1685 if opts[r'added']:
1685 if opts[r'added']:
1686 acceptable += 'a'
1686 acceptable += 'a'
1687 if opts[r'removed']:
1687 if opts[r'removed']:
1688 acceptable += 'r'
1688 acceptable += 'r'
1689 cwd = repo.getcwd()
1689 cwd = repo.getcwd()
1690 if not specs:
1690 if not specs:
1691 specs = ['.']
1691 specs = ['.']
1692
1692
1693 files, dirs = set(), set()
1693 files, dirs = set(), set()
1694 for spec in specs:
1694 for spec in specs:
1695 f, d = complete(spec, acceptable or 'nmar')
1695 f, d = complete(spec, acceptable or 'nmar')
1696 files.update(f)
1696 files.update(f)
1697 dirs.update(d)
1697 dirs.update(d)
1698 files.update(dirs)
1698 files.update(dirs)
1699 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1699 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1700 ui.write('\n')
1700 ui.write('\n')
1701
1701
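# Example (URL is illustrative): "hg debugpeer ssh://example.com/repo" opens a
# connection and reports the peer's URL, whether it is local, and whether it
# accepts pushes.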
1702 @command('debugpeer', [], _('PATH'), norepo=True)
1702 @command('debugpeer', [], _('PATH'), norepo=True)
1703 def debugpeer(ui, path):
1703 def debugpeer(ui, path):
1704 """establish a connection to a peer repository"""
1704 """establish a connection to a peer repository"""
1705 # Always enable peer request logging. Requires --debug to display
1705 # Always enable peer request logging. Requires --debug to display
1706 # though.
1706 # though.
1707 overrides = {
1707 overrides = {
1708 ('devel', 'debug.peer-request'): True,
1708 ('devel', 'debug.peer-request'): True,
1709 }
1709 }
1710
1710
1711 with ui.configoverride(overrides):
1711 with ui.configoverride(overrides):
1712 peer = hg.peer(ui, {}, path)
1712 peer = hg.peer(ui, {}, path)
1713
1713
1714 local = peer.local() is not None
1714 local = peer.local() is not None
1715 canpush = peer.canpush()
1715 canpush = peer.canpush()
1716
1716
1717 ui.write(_('url: %s\n') % peer.url())
1717 ui.write(_('url: %s\n') % peer.url())
1718 ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
1718 ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
1719 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
1719 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
1720
1720
1721 @command('debugpickmergetool',
1721 @command('debugpickmergetool',
1722 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1722 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1723 ('', 'changedelete', None, _('emulate merging change and delete')),
1723 ('', 'changedelete', None, _('emulate merging change and delete')),
1724 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1724 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1725 _('[PATTERN]...'),
1725 _('[PATTERN]...'),
1726 inferrepo=True)
1726 inferrepo=True)
1727 def debugpickmergetool(ui, repo, *pats, **opts):
1727 def debugpickmergetool(ui, repo, *pats, **opts):
1728 """examine which merge tool is chosen for specified file
1728 """examine which merge tool is chosen for specified file
1729
1729
1730 As described in :hg:`help merge-tools`, Mercurial examines
1730 As described in :hg:`help merge-tools`, Mercurial examines
1731 the configurations below, in this order, to decide which merge tool is
1731 the configurations below, in this order, to decide which merge tool is
1732 chosen for the specified file.
1732 chosen for the specified file.
1733
1733
1734 1. ``--tool`` option
1734 1. ``--tool`` option
1735 2. ``HGMERGE`` environment variable
1735 2. ``HGMERGE`` environment variable
1736 3. configurations in ``merge-patterns`` section
1736 3. configurations in ``merge-patterns`` section
1737 4. configuration of ``ui.merge``
1737 4. configuration of ``ui.merge``
1738 5. configurations in ``merge-tools`` section
1738 5. configurations in ``merge-tools`` section
1739 6. ``hgmerge`` tool (for historical reasons only)
1739 6. ``hgmerge`` tool (for historical reasons only)
1740 7. default tool for fallback (``:merge`` or ``:prompt``)
1740 7. default tool for fallback (``:merge`` or ``:prompt``)
1741
1741
1742 This command writes out the examination result in the style below::
1742 This command writes out the examination result in the style below::
1743
1743
1744 FILE = MERGETOOL
1744 FILE = MERGETOOL
1745
1745
1746 By default, all files known in the first parent context of the
1746 By default, all files known in the first parent context of the
1747 working directory are examined. Use file patterns and/or -I/-X
1747 working directory are examined. Use file patterns and/or -I/-X
1748 options to limit target files. -r/--rev is also useful to examine
1748 options to limit target files. -r/--rev is also useful to examine
1749 files in another context without actually updating to it.
1749 files in another context without actually updating to it.
1750
1750
1751 With --debug, this command shows warning messages while matching
1751 With --debug, this command shows warning messages while matching
1752 against ``merge-patterns`` and so on, too. It is recommended to
1752 against ``merge-patterns`` and so on, too. It is recommended to
1753 use this option with explicit file patterns and/or -I/-X options,
1753 use this option with explicit file patterns and/or -I/-X options,
1754 because this option increases the amount of output per file according
1754 because this option increases the amount of output per file according
1755 to the configurations in hgrc.
1755 to the configurations in hgrc.
1756
1756
1757 With -v/--verbose, this command first shows the configurations below
1757 With -v/--verbose, this command first shows the configurations below
1758 (only if they are specified).
1758 (only if they are specified).
1759
1759
1760 - ``--tool`` option
1760 - ``--tool`` option
1761 - ``HGMERGE`` environment variable
1761 - ``HGMERGE`` environment variable
1762 - configuration of ``ui.merge``
1762 - configuration of ``ui.merge``
1763
1763
1764 If a merge tool is chosen before matching against
1764 If a merge tool is chosen before matching against
1765 ``merge-patterns``, this command can't show any helpful
1765 ``merge-patterns``, this command can't show any helpful
1766 information, even with --debug. In such a case, the information above is
1766 information, even with --debug. In such a case, the information above is
1767 useful for understanding why a merge tool was chosen.
1767 useful for understanding why a merge tool was chosen.
1768 """
1768 """
1769 opts = pycompat.byteskwargs(opts)
1769 opts = pycompat.byteskwargs(opts)
1770 overrides = {}
1770 overrides = {}
1771 if opts['tool']:
1771 if opts['tool']:
1772 overrides[('ui', 'forcemerge')] = opts['tool']
1772 overrides[('ui', 'forcemerge')] = opts['tool']
1773 ui.note(('with --tool %r\n') % (opts['tool']))
1773 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1774
1774
1775 with ui.configoverride(overrides, 'debugmergepatterns'):
1775 with ui.configoverride(overrides, 'debugmergepatterns'):
1776 hgmerge = encoding.environ.get("HGMERGE")
1776 hgmerge = encoding.environ.get("HGMERGE")
1777 if hgmerge is not None:
1777 if hgmerge is not None:
1778 ui.note(('with HGMERGE=%r\n') % (hgmerge))
1778 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1779 uimerge = ui.config("ui", "merge")
1779 uimerge = ui.config("ui", "merge")
1780 if uimerge:
1780 if uimerge:
1781 ui.note(('with ui.merge=%r\n') % (uimerge))
1781 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1782
1782
1783 ctx = scmutil.revsingle(repo, opts.get('rev'))
1783 ctx = scmutil.revsingle(repo, opts.get('rev'))
1784 m = scmutil.match(ctx, pats, opts)
1784 m = scmutil.match(ctx, pats, opts)
1785 changedelete = opts['changedelete']
1785 changedelete = opts['changedelete']
1786 for path in ctx.walk(m):
1786 for path in ctx.walk(m):
1787 fctx = ctx[path]
1787 fctx = ctx[path]
1788 try:
1788 try:
1789 if not ui.debugflag:
1789 if not ui.debugflag:
1790 ui.pushbuffer(error=True)
1790 ui.pushbuffer(error=True)
1791 tool, toolpath = filemerge._picktool(repo, ui, path,
1791 tool, toolpath = filemerge._picktool(repo, ui, path,
1792 fctx.isbinary(),
1792 fctx.isbinary(),
1793 'l' in fctx.flags(),
1793 'l' in fctx.flags(),
1794 changedelete)
1794 changedelete)
1795 finally:
1795 finally:
1796 if not ui.debugflag:
1796 if not ui.debugflag:
1797 ui.popbuffer()
1797 ui.popbuffer()
1798 ui.write(('%s = %s\n') % (path, tool))
1798 ui.write(('%s = %s\n') % (path, tool))
1799
1799
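The selection logic itself lives in ``filemerge._picktool``, which the command calls with exactly the arguments shown above; a rough sketch of driving it directly (``ctx`` and the ``paths`` iterable are assumed to be provided by the caller)::

    from mercurial import filemerge

    def pickedtools(ui, repo, ctx, paths, changedelete=False):
        """Map each path to the tool debugpickmergetool would report."""
        picked = {}
        for path in paths:
            fctx = ctx[path]
            tool, toolpath = filemerge._picktool(repo, ui, path,
                                                 fctx.isbinary(),
                                                 'l' in fctx.flags(),
                                                 changedelete)
            picked[path] = tool
        return picked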
1800 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1800 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1801 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1801 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1802 '''access the pushkey key/value protocol
1802 '''access the pushkey key/value protocol
1803
1803
1804 With two args, list the keys in the given namespace.
1804 With two args, list the keys in the given namespace.
1805
1805
1806 With five args, set a key to new if it currently is set to old.
1806 With five args, set a key to new if it currently is set to old.
1807 Reports success or failure.
1807 Reports success or failure.
1808 '''
1808 '''
1809
1809
1810 target = hg.peer(ui, {}, repopath)
1810 target = hg.peer(ui, {}, repopath)
1811 if keyinfo:
1811 if keyinfo:
1812 key, old, new = keyinfo
1812 key, old, new = keyinfo
1813 r = target.pushkey(namespace, key, old, new)
1813 r = target.pushkey(namespace, key, old, new)
1814 ui.status(pycompat.bytestr(r) + '\n')
1814 ui.status(pycompat.bytestr(r) + '\n')
1815 return not r
1815 return not r
1816 else:
1816 else:
1817 for k, v in sorted(target.listkeys(namespace).iteritems()):
1817 for k, v in sorted(target.listkeys(namespace).iteritems()):
1818 ui.write("%s\t%s\n" % (util.escapestr(k),
1818 ui.write("%s\t%s\n" % (util.escapestr(k),
1819 util.escapestr(v)))
1819 util.escapestr(v)))
1820
1820
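The two modes map directly onto the ``listkeys``/``pushkey`` peer methods used above; a hedged sketch (the URL and namespace are placeholders)::

    from mercurial import hg

    def listnamespace(ui, url, namespace='bookmarks'):
        # Two-arg mode: enumerate the keys in a namespace.
        peer = hg.peer(ui, {}, url)
        return sorted(peer.listkeys(namespace).items())

    def conditionalset(ui, url, namespace, key, old, new):
        # Five-arg mode: succeeds only if the key still holds ``old``.
        peer = hg.peer(ui, {}, url)
        return peer.pushkey(namespace, key, old, new)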
1821 @command('debugpvec', [], _('A B'))
1821 @command('debugpvec', [], _('A B'))
1822 def debugpvec(ui, repo, a, b=None):
1822 def debugpvec(ui, repo, a, b=None):
1823 ca = scmutil.revsingle(repo, a)
1823 ca = scmutil.revsingle(repo, a)
1824 cb = scmutil.revsingle(repo, b)
1824 cb = scmutil.revsingle(repo, b)
1825 pa = pvec.ctxpvec(ca)
1825 pa = pvec.ctxpvec(ca)
1826 pb = pvec.ctxpvec(cb)
1826 pb = pvec.ctxpvec(cb)
1827 if pa == pb:
1827 if pa == pb:
1828 rel = "="
1828 rel = "="
1829 elif pa > pb:
1829 elif pa > pb:
1830 rel = ">"
1830 rel = ">"
1831 elif pa < pb:
1831 elif pa < pb:
1832 rel = "<"
1832 rel = "<"
1833 elif pa | pb:
1833 elif pa | pb:
1834 rel = "|"
1834 rel = "|"
1835 ui.write(_("a: %s\n") % pa)
1835 ui.write(_("a: %s\n") % pa)
1836 ui.write(_("b: %s\n") % pb)
1836 ui.write(_("b: %s\n") % pb)
1837 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1837 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1838 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1838 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1839 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1839 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1840 pa.distance(pb), rel))
1840 pa.distance(pb), rel))
1841
1841
1842 @command('debugrebuilddirstate|debugrebuildstate',
1842 @command('debugrebuilddirstate|debugrebuildstate',
1843 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1843 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1844 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1844 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1845 'the working copy parent')),
1845 'the working copy parent')),
1846 ],
1846 ],
1847 _('[-r REV]'))
1847 _('[-r REV]'))
1848 def debugrebuilddirstate(ui, repo, rev, **opts):
1848 def debugrebuilddirstate(ui, repo, rev, **opts):
1849 """rebuild the dirstate as it would look like for the given revision
1849 """rebuild the dirstate as it would look like for the given revision
1850
1850
1851 If no revision is specified the first current parent will be used.
1851 If no revision is specified the first current parent will be used.
1852
1852
1853 The dirstate will be set to the files of the given revision.
1853 The dirstate will be set to the files of the given revision.
1854 The actual working directory content or existing dirstate
1854 The actual working directory content or existing dirstate
1855 information such as adds or removes is not considered.
1855 information such as adds or removes is not considered.
1856
1856
1857 ``minimal`` will only rebuild the dirstate status for files that claim to be
1857 ``minimal`` will only rebuild the dirstate status for files that claim to be
1858 tracked but are not in the parent manifest, or that exist in the parent
1858 tracked but are not in the parent manifest, or that exist in the parent
1859 manifest but are not in the dirstate. It will not change adds, removes, or
1859 manifest but are not in the dirstate. It will not change adds, removes, or
1860 modified files that are in the working copy parent.
1860 modified files that are in the working copy parent.
1861
1861
1862 One use of this command is to make the next :hg:`status` invocation
1862 One use of this command is to make the next :hg:`status` invocation
1863 check the actual file content.
1863 check the actual file content.
1864 """
1864 """
1865 ctx = scmutil.revsingle(repo, rev)
1865 ctx = scmutil.revsingle(repo, rev)
1866 with repo.wlock():
1866 with repo.wlock():
1867 dirstate = repo.dirstate
1867 dirstate = repo.dirstate
1868 changedfiles = None
1868 changedfiles = None
1869 # See command doc for what minimal does.
1869 # See command doc for what minimal does.
1870 if opts.get(r'minimal'):
1870 if opts.get(r'minimal'):
1871 manifestfiles = set(ctx.manifest().keys())
1871 manifestfiles = set(ctx.manifest().keys())
1872 dirstatefiles = set(dirstate)
1872 dirstatefiles = set(dirstate)
1873 manifestonly = manifestfiles - dirstatefiles
1873 manifestonly = manifestfiles - dirstatefiles
1874 dsonly = dirstatefiles - manifestfiles
1874 dsonly = dirstatefiles - manifestfiles
1875 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1875 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1876 changedfiles = manifestonly | dsnotadded
1876 changedfiles = manifestonly | dsnotadded
1877
1877
1878 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1878 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1879
1879
1880 @command('debugrebuildfncache', [], '')
1880 @command('debugrebuildfncache', [], '')
1881 def debugrebuildfncache(ui, repo):
1881 def debugrebuildfncache(ui, repo):
1882 """rebuild the fncache file"""
1882 """rebuild the fncache file"""
1883 repair.rebuildfncache(ui, repo)
1883 repair.rebuildfncache(ui, repo)
1884
1884
1885 @command('debugrename',
1885 @command('debugrename',
1886 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1886 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1887 _('[-r REV] FILE'))
1887 _('[-r REV] FILE'))
1888 def debugrename(ui, repo, file1, *pats, **opts):
1888 def debugrename(ui, repo, file1, *pats, **opts):
1889 """dump rename information"""
1889 """dump rename information"""
1890
1890
1891 opts = pycompat.byteskwargs(opts)
1891 opts = pycompat.byteskwargs(opts)
1892 ctx = scmutil.revsingle(repo, opts.get('rev'))
1892 ctx = scmutil.revsingle(repo, opts.get('rev'))
1893 m = scmutil.match(ctx, (file1,) + pats, opts)
1893 m = scmutil.match(ctx, (file1,) + pats, opts)
1894 for abs in ctx.walk(m):
1894 for abs in ctx.walk(m):
1895 fctx = ctx[abs]
1895 fctx = ctx[abs]
1896 o = fctx.filelog().renamed(fctx.filenode())
1896 o = fctx.filelog().renamed(fctx.filenode())
1897 rel = m.rel(abs)
1897 rel = m.rel(abs)
1898 if o:
1898 if o:
1899 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1899 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1900 else:
1900 else:
1901 ui.write(_("%s not renamed\n") % rel)
1901 ui.write(_("%s not renamed\n") % rel)
1902
1902
1903 @command('debugrevlog', cmdutil.debugrevlogopts +
1903 @command('debugrevlog', cmdutil.debugrevlogopts +
1904 [('d', 'dump', False, _('dump index data'))],
1904 [('d', 'dump', False, _('dump index data'))],
1905 _('-c|-m|FILE'),
1905 _('-c|-m|FILE'),
1906 optionalrepo=True)
1906 optionalrepo=True)
1907 def debugrevlog(ui, repo, file_=None, **opts):
1907 def debugrevlog(ui, repo, file_=None, **opts):
1908 """show data and statistics about a revlog"""
1908 """show data and statistics about a revlog"""
1909 opts = pycompat.byteskwargs(opts)
1909 opts = pycompat.byteskwargs(opts)
1910 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1910 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1911
1911
1912 if opts.get("dump"):
1912 if opts.get("dump"):
1913 numrevs = len(r)
1913 numrevs = len(r)
1914 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1914 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1915 " rawsize totalsize compression heads chainlen\n"))
1915 " rawsize totalsize compression heads chainlen\n"))
1916 ts = 0
1916 ts = 0
1917 heads = set()
1917 heads = set()
1918
1918
1919 for rev in xrange(numrevs):
1919 for rev in xrange(numrevs):
1920 dbase = r.deltaparent(rev)
1920 dbase = r.deltaparent(rev)
1921 if dbase == -1:
1921 if dbase == -1:
1922 dbase = rev
1922 dbase = rev
1923 cbase = r.chainbase(rev)
1923 cbase = r.chainbase(rev)
1924 clen = r.chainlen(rev)
1924 clen = r.chainlen(rev)
1925 p1, p2 = r.parentrevs(rev)
1925 p1, p2 = r.parentrevs(rev)
1926 rs = r.rawsize(rev)
1926 rs = r.rawsize(rev)
1927 ts = ts + rs
1927 ts = ts + rs
1928 heads -= set(r.parentrevs(rev))
1928 heads -= set(r.parentrevs(rev))
1929 heads.add(rev)
1929 heads.add(rev)
1930 try:
1930 try:
1931 compression = ts / r.end(rev)
1931 compression = ts / r.end(rev)
1932 except ZeroDivisionError:
1932 except ZeroDivisionError:
1933 compression = 0
1933 compression = 0
1934 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1934 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1935 "%11d %5d %8d\n" %
1935 "%11d %5d %8d\n" %
1936 (rev, p1, p2, r.start(rev), r.end(rev),
1936 (rev, p1, p2, r.start(rev), r.end(rev),
1937 r.start(dbase), r.start(cbase),
1937 r.start(dbase), r.start(cbase),
1938 r.start(p1), r.start(p2),
1938 r.start(p1), r.start(p2),
1939 rs, ts, compression, len(heads), clen))
1939 rs, ts, compression, len(heads), clen))
1940 return 0
1940 return 0
1941
1941
1942 v = r.version
1942 v = r.version
1943 format = v & 0xFFFF
1943 format = v & 0xFFFF
1944 flags = []
1944 flags = []
1945 gdelta = False
1945 gdelta = False
1946 if v & revlog.FLAG_INLINE_DATA:
1946 if v & revlog.FLAG_INLINE_DATA:
1947 flags.append('inline')
1947 flags.append('inline')
1948 if v & revlog.FLAG_GENERALDELTA:
1948 if v & revlog.FLAG_GENERALDELTA:
1949 gdelta = True
1949 gdelta = True
1950 flags.append('generaldelta')
1950 flags.append('generaldelta')
1951 if not flags:
1951 if not flags:
1952 flags = ['(none)']
1952 flags = ['(none)']
1953
1953
1954 nummerges = 0
1954 nummerges = 0
1955 numfull = 0
1955 numfull = 0
1956 numprev = 0
1956 numprev = 0
1957 nump1 = 0
1957 nump1 = 0
1958 nump2 = 0
1958 nump2 = 0
1959 numother = 0
1959 numother = 0
1960 nump1prev = 0
1960 nump1prev = 0
1961 nump2prev = 0
1961 nump2prev = 0
1962 chainlengths = []
1962 chainlengths = []
1963 chainbases = []
1963 chainbases = []
1964 chainspans = []
1964 chainspans = []
1965
1965
1966 datasize = [None, 0, 0]
1966 datasize = [None, 0, 0]
1967 fullsize = [None, 0, 0]
1967 fullsize = [None, 0, 0]
1968 deltasize = [None, 0, 0]
1968 deltasize = [None, 0, 0]
1969 chunktypecounts = {}
1969 chunktypecounts = {}
1970 chunktypesizes = {}
1970 chunktypesizes = {}
1971
1971
1972 def addsize(size, l):
1972 def addsize(size, l):
1973 if l[0] is None or size < l[0]:
1973 if l[0] is None or size < l[0]:
1974 l[0] = size
1974 l[0] = size
1975 if size > l[1]:
1975 if size > l[1]:
1976 l[1] = size
1976 l[1] = size
1977 l[2] += size
1977 l[2] += size
1978
1978
1979 numrevs = len(r)
1979 numrevs = len(r)
1980 for rev in xrange(numrevs):
1980 for rev in xrange(numrevs):
1981 p1, p2 = r.parentrevs(rev)
1981 p1, p2 = r.parentrevs(rev)
1982 delta = r.deltaparent(rev)
1982 delta = r.deltaparent(rev)
1983 if format > 0:
1983 if format > 0:
1984 addsize(r.rawsize(rev), datasize)
1984 addsize(r.rawsize(rev), datasize)
1985 if p2 != nullrev:
1985 if p2 != nullrev:
1986 nummerges += 1
1986 nummerges += 1
1987 size = r.length(rev)
1987 size = r.length(rev)
1988 if delta == nullrev:
1988 if delta == nullrev:
1989 chainlengths.append(0)
1989 chainlengths.append(0)
1990 chainbases.append(r.start(rev))
1990 chainbases.append(r.start(rev))
1991 chainspans.append(size)
1991 chainspans.append(size)
1992 numfull += 1
1992 numfull += 1
1993 addsize(size, fullsize)
1993 addsize(size, fullsize)
1994 else:
1994 else:
1995 chainlengths.append(chainlengths[delta] + 1)
1995 chainlengths.append(chainlengths[delta] + 1)
1996 baseaddr = chainbases[delta]
1996 baseaddr = chainbases[delta]
1997 revaddr = r.start(rev)
1997 revaddr = r.start(rev)
1998 chainbases.append(baseaddr)
1998 chainbases.append(baseaddr)
1999 chainspans.append((revaddr - baseaddr) + size)
1999 chainspans.append((revaddr - baseaddr) + size)
2000 addsize(size, deltasize)
2000 addsize(size, deltasize)
2001 if delta == rev - 1:
2001 if delta == rev - 1:
2002 numprev += 1
2002 numprev += 1
2003 if delta == p1:
2003 if delta == p1:
2004 nump1prev += 1
2004 nump1prev += 1
2005 elif delta == p2:
2005 elif delta == p2:
2006 nump2prev += 1
2006 nump2prev += 1
2007 elif delta == p1:
2007 elif delta == p1:
2008 nump1 += 1
2008 nump1 += 1
2009 elif delta == p2:
2009 elif delta == p2:
2010 nump2 += 1
2010 nump2 += 1
2011 elif delta != nullrev:
2011 elif delta != nullrev:
2012 numother += 1
2012 numother += 1
2013
2013
2014 # Obtain data on the raw chunks in the revlog.
2014 # Obtain data on the raw chunks in the revlog.
2015 segment = r._getsegmentforrevs(rev, rev)[1]
2015 segment = r._getsegmentforrevs(rev, rev)[1]
2016 if segment:
2016 if segment:
2017 chunktype = bytes(segment[0:1])
2017 chunktype = bytes(segment[0:1])
2018 else:
2018 else:
2019 chunktype = 'empty'
2019 chunktype = 'empty'
2020
2020
2021 if chunktype not in chunktypecounts:
2021 if chunktype not in chunktypecounts:
2022 chunktypecounts[chunktype] = 0
2022 chunktypecounts[chunktype] = 0
2023 chunktypesizes[chunktype] = 0
2023 chunktypesizes[chunktype] = 0
2024
2024
2025 chunktypecounts[chunktype] += 1
2025 chunktypecounts[chunktype] += 1
2026 chunktypesizes[chunktype] += size
2026 chunktypesizes[chunktype] += size
2027
2027
2028 # Adjust size min value for empty cases
2028 # Adjust size min value for empty cases
2029 for size in (datasize, fullsize, deltasize):
2029 for size in (datasize, fullsize, deltasize):
2030 if size[0] is None:
2030 if size[0] is None:
2031 size[0] = 0
2031 size[0] = 0
2032
2032
2033 numdeltas = numrevs - numfull
2033 numdeltas = numrevs - numfull
2034 numoprev = numprev - nump1prev - nump2prev
2034 numoprev = numprev - nump1prev - nump2prev
2035 totalrawsize = datasize[2]
2035 totalrawsize = datasize[2]
2036 datasize[2] /= numrevs
2036 datasize[2] /= numrevs
2037 fulltotal = fullsize[2]
2037 fulltotal = fullsize[2]
2038 fullsize[2] /= numfull
2038 fullsize[2] /= numfull
2039 deltatotal = deltasize[2]
2039 deltatotal = deltasize[2]
2040 if numrevs - numfull > 0:
2040 if numrevs - numfull > 0:
2041 deltasize[2] /= numrevs - numfull
2041 deltasize[2] /= numrevs - numfull
2042 totalsize = fulltotal + deltatotal
2042 totalsize = fulltotal + deltatotal
2043 avgchainlen = sum(chainlengths) / numrevs
2043 avgchainlen = sum(chainlengths) / numrevs
2044 maxchainlen = max(chainlengths)
2044 maxchainlen = max(chainlengths)
2045 maxchainspan = max(chainspans)
2045 maxchainspan = max(chainspans)
2046 compratio = 1
2046 compratio = 1
2047 if totalsize:
2047 if totalsize:
2048 compratio = totalrawsize / totalsize
2048 compratio = totalrawsize / totalsize
2049
2049
2050 basedfmtstr = '%%%dd\n'
2050 basedfmtstr = '%%%dd\n'
2051 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2051 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2052
2052
2053 def dfmtstr(max):
2053 def dfmtstr(max):
2054 return basedfmtstr % len(str(max))
2054 return basedfmtstr % len(str(max))
2055 def pcfmtstr(max, padding=0):
2055 def pcfmtstr(max, padding=0):
2056 return basepcfmtstr % (len(str(max)), ' ' * padding)
2056 return basepcfmtstr % (len(str(max)), ' ' * padding)
2057
2057
2058 def pcfmt(value, total):
2058 def pcfmt(value, total):
2059 if total:
2059 if total:
2060 return (value, 100 * float(value) / total)
2060 return (value, 100 * float(value) / total)
2061 else:
2061 else:
2062 return value, 100.0
2062 return value, 100.0
2063
2063
2064 ui.write(('format : %d\n') % format)
2064 ui.write(('format : %d\n') % format)
2065 ui.write(('flags : %s\n') % ', '.join(flags))
2065 ui.write(('flags : %s\n') % ', '.join(flags))
2066
2066
2067 ui.write('\n')
2067 ui.write('\n')
2068 fmt = pcfmtstr(totalsize)
2068 fmt = pcfmtstr(totalsize)
2069 fmt2 = dfmtstr(totalsize)
2069 fmt2 = dfmtstr(totalsize)
2070 ui.write(('revisions : ') + fmt2 % numrevs)
2070 ui.write(('revisions : ') + fmt2 % numrevs)
2071 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2071 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2072 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2072 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2073 ui.write(('revisions : ') + fmt2 % numrevs)
2073 ui.write(('revisions : ') + fmt2 % numrevs)
2074 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
2074 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
2075 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2075 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2076 ui.write(('revision size : ') + fmt2 % totalsize)
2076 ui.write(('revision size : ') + fmt2 % totalsize)
2077 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
2077 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
2078 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2078 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2079
2079
2080 def fmtchunktype(chunktype):
2080 def fmtchunktype(chunktype):
2081 if chunktype == 'empty':
2081 if chunktype == 'empty':
2082 return ' %s : ' % chunktype
2082 return ' %s : ' % chunktype
2083 elif chunktype in pycompat.bytestr(string.ascii_letters):
2083 elif chunktype in pycompat.bytestr(string.ascii_letters):
2084 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2084 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2085 else:
2085 else:
2086 return ' 0x%s : ' % hex(chunktype)
2086 return ' 0x%s : ' % hex(chunktype)
2087
2087
2088 ui.write('\n')
2088 ui.write('\n')
2089 ui.write(('chunks : ') + fmt2 % numrevs)
2089 ui.write(('chunks : ') + fmt2 % numrevs)
2090 for chunktype in sorted(chunktypecounts):
2090 for chunktype in sorted(chunktypecounts):
2091 ui.write(fmtchunktype(chunktype))
2091 ui.write(fmtchunktype(chunktype))
2092 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2092 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2093 ui.write(('chunks size : ') + fmt2 % totalsize)
2093 ui.write(('chunks size : ') + fmt2 % totalsize)
2094 for chunktype in sorted(chunktypecounts):
2094 for chunktype in sorted(chunktypecounts):
2095 ui.write(fmtchunktype(chunktype))
2095 ui.write(fmtchunktype(chunktype))
2096 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2096 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2097
2097
2098 ui.write('\n')
2098 ui.write('\n')
2099 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2099 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2100 ui.write(('avg chain length : ') + fmt % avgchainlen)
2100 ui.write(('avg chain length : ') + fmt % avgchainlen)
2101 ui.write(('max chain length : ') + fmt % maxchainlen)
2101 ui.write(('max chain length : ') + fmt % maxchainlen)
2102 ui.write(('max chain reach : ') + fmt % maxchainspan)
2102 ui.write(('max chain reach : ') + fmt % maxchainspan)
2103 ui.write(('compression ratio : ') + fmt % compratio)
2103 ui.write(('compression ratio : ') + fmt % compratio)
2104
2104
2105 if format > 0:
2105 if format > 0:
2106 ui.write('\n')
2106 ui.write('\n')
2107 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2107 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2108 % tuple(datasize))
2108 % tuple(datasize))
2109 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2109 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2110 % tuple(fullsize))
2110 % tuple(fullsize))
2111 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2111 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2112 % tuple(deltasize))
2112 % tuple(deltasize))
2113
2113
2114 if numdeltas > 0:
2114 if numdeltas > 0:
2115 ui.write('\n')
2115 ui.write('\n')
2116 fmt = pcfmtstr(numdeltas)
2116 fmt = pcfmtstr(numdeltas)
2117 fmt2 = pcfmtstr(numdeltas, 4)
2117 fmt2 = pcfmtstr(numdeltas, 4)
2118 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2118 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2119 if numprev > 0:
2119 if numprev > 0:
2120 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2120 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2121 numprev))
2121 numprev))
2122 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2122 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2123 numprev))
2123 numprev))
2124 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2124 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2125 numprev))
2125 numprev))
2126 if gdelta:
2126 if gdelta:
2127 ui.write(('deltas against p1 : ')
2127 ui.write(('deltas against p1 : ')
2128 + fmt % pcfmt(nump1, numdeltas))
2128 + fmt % pcfmt(nump1, numdeltas))
2129 ui.write(('deltas against p2 : ')
2129 ui.write(('deltas against p2 : ')
2130 + fmt % pcfmt(nump2, numdeltas))
2130 + fmt % pcfmt(nump2, numdeltas))
2131 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2131 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2132 numdeltas))
2132 numdeltas))
2133
2133
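The report columns above are built from two small formatting helpers; an isolated sketch of how they combine (the sample numbers are made up)::

    def pcfmt(value, total):
        # Return (value, percentage of total), guarding against total == 0.
        if total:
            return (value, 100 * float(value) / total)
        return value, 100.0

    def pcfmtstr(maxvalue, padding=0):
        # Width follows the widest value so the percentage columns line up.
        return '%%%dd %s(%%5.2f%%%%)\n' % (len(str(maxvalue)), ' ' * padding)

    fmt = pcfmtstr(12345)
    line = fmt % pcfmt(321, 12345)   # '  321 ( 2.60%)\n'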
2134 @command('debugrevspec',
2134 @command('debugrevspec',
2135 [('', 'optimize', None,
2135 [('', 'optimize', None,
2136 _('print parsed tree after optimizing (DEPRECATED)')),
2136 _('print parsed tree after optimizing (DEPRECATED)')),
2137 ('', 'show-revs', True, _('print list of result revisions (default)')),
2137 ('', 'show-revs', True, _('print list of result revisions (default)')),
2138 ('s', 'show-set', None, _('print internal representation of result set')),
2138 ('s', 'show-set', None, _('print internal representation of result set')),
2139 ('p', 'show-stage', [],
2139 ('p', 'show-stage', [],
2140 _('print parsed tree at the given stage'), _('NAME')),
2140 _('print parsed tree at the given stage'), _('NAME')),
2141 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2141 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2142 ('', 'verify-optimized', False, _('verify optimized result')),
2142 ('', 'verify-optimized', False, _('verify optimized result')),
2143 ],
2143 ],
2144 ('REVSPEC'))
2144 ('REVSPEC'))
2145 def debugrevspec(ui, repo, expr, **opts):
2145 def debugrevspec(ui, repo, expr, **opts):
2146 """parse and apply a revision specification
2146 """parse and apply a revision specification
2147
2147
2148 Use -p/--show-stage option to print the parsed tree at the given stages.
2148 Use -p/--show-stage option to print the parsed tree at the given stages.
2149 Use -p all to print the tree at every stage.
2149 Use -p all to print the tree at every stage.
2150
2150
2151 Use --no-show-revs option with -s or -p to print only the set
2151 Use --no-show-revs option with -s or -p to print only the set
2152 representation or the parsed tree respectively.
2152 representation or the parsed tree respectively.
2153
2153
2154 Use --verify-optimized to compare the optimized result with the unoptimized
2154 Use --verify-optimized to compare the optimized result with the unoptimized
2155 one. Returns 1 if the optimized result differs.
2155 one. Returns 1 if the optimized result differs.
2156 """
2156 """
2157 opts = pycompat.byteskwargs(opts)
2157 opts = pycompat.byteskwargs(opts)
2158 aliases = ui.configitems('revsetalias')
2158 aliases = ui.configitems('revsetalias')
2159 stages = [
2159 stages = [
2160 ('parsed', lambda tree: tree),
2160 ('parsed', lambda tree: tree),
2161 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2161 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2162 ui.warn)),
2162 ui.warn)),
2163 ('concatenated', revsetlang.foldconcat),
2163 ('concatenated', revsetlang.foldconcat),
2164 ('analyzed', revsetlang.analyze),
2164 ('analyzed', revsetlang.analyze),
2165 ('optimized', revsetlang.optimize),
2165 ('optimized', revsetlang.optimize),
2166 ]
2166 ]
2167 if opts['no_optimized']:
2167 if opts['no_optimized']:
2168 stages = stages[:-1]
2168 stages = stages[:-1]
2169 if opts['verify_optimized'] and opts['no_optimized']:
2169 if opts['verify_optimized'] and opts['no_optimized']:
2170 raise error.Abort(_('cannot use --verify-optimized with '
2170 raise error.Abort(_('cannot use --verify-optimized with '
2171 '--no-optimized'))
2171 '--no-optimized'))
2172 stagenames = set(n for n, f in stages)
2172 stagenames = set(n for n, f in stages)
2173
2173
2174 showalways = set()
2174 showalways = set()
2175 showchanged = set()
2175 showchanged = set()
2176 if ui.verbose and not opts['show_stage']:
2176 if ui.verbose and not opts['show_stage']:
2177 # show parsed tree by --verbose (deprecated)
2177 # show parsed tree by --verbose (deprecated)
2178 showalways.add('parsed')
2178 showalways.add('parsed')
2179 showchanged.update(['expanded', 'concatenated'])
2179 showchanged.update(['expanded', 'concatenated'])
2180 if opts['optimize']:
2180 if opts['optimize']:
2181 showalways.add('optimized')
2181 showalways.add('optimized')
2182 if opts['show_stage'] and opts['optimize']:
2182 if opts['show_stage'] and opts['optimize']:
2183 raise error.Abort(_('cannot use --optimize with --show-stage'))
2183 raise error.Abort(_('cannot use --optimize with --show-stage'))
2184 if opts['show_stage'] == ['all']:
2184 if opts['show_stage'] == ['all']:
2185 showalways.update(stagenames)
2185 showalways.update(stagenames)
2186 else:
2186 else:
2187 for n in opts['show_stage']:
2187 for n in opts['show_stage']:
2188 if n not in stagenames:
2188 if n not in stagenames:
2189 raise error.Abort(_('invalid stage name: %s') % n)
2189 raise error.Abort(_('invalid stage name: %s') % n)
2190 showalways.update(opts['show_stage'])
2190 showalways.update(opts['show_stage'])
2191
2191
2192 treebystage = {}
2192 treebystage = {}
2193 printedtree = None
2193 printedtree = None
2194 tree = revsetlang.parse(expr, lookup=repo.__contains__)
2194 tree = revsetlang.parse(expr, lookup=repo.__contains__)
2195 for n, f in stages:
2195 for n, f in stages:
2196 treebystage[n] = tree = f(tree)
2196 treebystage[n] = tree = f(tree)
2197 if n in showalways or (n in showchanged and tree != printedtree):
2197 if n in showalways or (n in showchanged and tree != printedtree):
2198 if opts['show_stage'] or n != 'parsed':
2198 if opts['show_stage'] or n != 'parsed':
2199 ui.write(("* %s:\n") % n)
2199 ui.write(("* %s:\n") % n)
2200 ui.write(revsetlang.prettyformat(tree), "\n")
2200 ui.write(revsetlang.prettyformat(tree), "\n")
2201 printedtree = tree
2201 printedtree = tree
2202
2202
2203 if opts['verify_optimized']:
2203 if opts['verify_optimized']:
2204 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2204 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2205 brevs = revset.makematcher(treebystage['optimized'])(repo)
2205 brevs = revset.makematcher(treebystage['optimized'])(repo)
2206 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2206 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2207 ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
2207 ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
2208 ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
2208 ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
2209 arevs = list(arevs)
2209 arevs = list(arevs)
2210 brevs = list(brevs)
2210 brevs = list(brevs)
2211 if arevs == brevs:
2211 if arevs == brevs:
2212 return 0
2212 return 0
2213 ui.write(('--- analyzed\n'), label='diff.file_a')
2213 ui.write(('--- analyzed\n'), label='diff.file_a')
2214 ui.write(('+++ optimized\n'), label='diff.file_b')
2214 ui.write(('+++ optimized\n'), label='diff.file_b')
2215 sm = difflib.SequenceMatcher(None, arevs, brevs)
2215 sm = difflib.SequenceMatcher(None, arevs, brevs)
2216 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2216 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2217 if tag in ('delete', 'replace'):
2217 if tag in ('delete', 'replace'):
2218 for c in arevs[alo:ahi]:
2218 for c in arevs[alo:ahi]:
2219 ui.write('-%s\n' % c, label='diff.deleted')
2219 ui.write('-%s\n' % c, label='diff.deleted')
2220 if tag in ('insert', 'replace'):
2220 if tag in ('insert', 'replace'):
2221 for c in brevs[blo:bhi]:
2221 for c in brevs[blo:bhi]:
2222 ui.write('+%s\n' % c, label='diff.inserted')
2222 ui.write('+%s\n' % c, label='diff.inserted')
2223 if tag == 'equal':
2223 if tag == 'equal':
2224 for c in arevs[alo:ahi]:
2224 for c in arevs[alo:ahi]:
2225 ui.write(' %s\n' % c)
2225 ui.write(' %s\n' % c)
2226 return 1
2226 return 1
2227
2227
2228 func = revset.makematcher(tree)
2228 func = revset.makematcher(tree)
2229 revs = func(repo)
2229 revs = func(repo)
2230 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2230 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2231 ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
2231 ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
2232 if not opts['show_revs']:
2232 if not opts['show_revs']:
2233 return
2233 return
2234 for c in revs:
2234 for c in revs:
2235 ui.write("%d\n" % c)
2235 ui.write("%d\n" % c)
2236
2236
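The stage table above is simply a pipeline of ``revsetlang`` transforms; a condensed sketch, assuming the caller passes the same alias list the command reads from ``[revsetalias]``::

    from mercurial import revsetlang

    def optimizedtree(expr, aliases=(), warn=None):
        tree = revsetlang.parse(expr)                         # 'parsed'
        tree = revsetlang.expandaliases(tree, aliases, warn)  # 'expanded'
        tree = revsetlang.foldconcat(tree)                    # 'concatenated'
        tree = revsetlang.analyze(tree)                       # 'analyzed'
        return revsetlang.optimize(tree)                      # 'optimized'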
2237 @command('debugserve', [
2237 @command('debugserve', [
2238 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2238 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2239 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2239 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2240 ('', 'logiofile', '', _('file to log server I/O to')),
2240 ('', 'logiofile', '', _('file to log server I/O to')),
2241 ], '')
2241 ], '')
2242 def debugserve(ui, repo, **opts):
2242 def debugserve(ui, repo, **opts):
2243 """run a server with advanced settings
2243 """run a server with advanced settings
2244
2244
2245 This command is similar to :hg:`serve`. It exists partially as a
2245 This command is similar to :hg:`serve`. It exists partially as a
2246 workaround for the fact that ``hg serve --stdio`` must have specific
2246 workaround for the fact that ``hg serve --stdio`` must have specific
2247 arguments for security reasons.
2247 arguments for security reasons.
2248 """
2248 """
2249 opts = pycompat.byteskwargs(opts)
2249 opts = pycompat.byteskwargs(opts)
2250
2250
2251 if not opts['sshstdio']:
2251 if not opts['sshstdio']:
2252 raise error.Abort(_('only --sshstdio is currently supported'))
2252 raise error.Abort(_('only --sshstdio is currently supported'))
2253
2253
2254 logfh = None
2254 logfh = None
2255
2255
2256 if opts['logiofd'] and opts['logiofile']:
2256 if opts['logiofd'] and opts['logiofile']:
2257 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2257 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2258
2258
2259 if opts['logiofd']:
2259 if opts['logiofd']:
2260 # Line buffered because output is line based.
2260 # Line buffered because output is line based.
2261 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2261 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2262 elif opts['logiofile']:
2262 elif opts['logiofile']:
2263 logfh = open(opts['logiofile'], 'ab', 1)
2263 logfh = open(opts['logiofile'], 'ab', 1)
2264
2264
2265 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2265 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2266 s.serve_forever()
2266 s.serve_forever()
2267
2267
2268 @command('debugsetparents', [], _('REV1 [REV2]'))
2268 @command('debugsetparents', [], _('REV1 [REV2]'))
2269 def debugsetparents(ui, repo, rev1, rev2=None):
2269 def debugsetparents(ui, repo, rev1, rev2=None):
2270 """manually set the parents of the current working directory
2270 """manually set the parents of the current working directory
2271
2271
2272 This is useful for writing repository conversion tools, but should
2272 This is useful for writing repository conversion tools, but should
2273 be used with care. For example, neither the working directory nor the
2273 be used with care. For example, neither the working directory nor the
2274 dirstate is updated, so file status may be incorrect after running this
2274 dirstate is updated, so file status may be incorrect after running this
2275 command.
2275 command.
2276
2276
2277 Returns 0 on success.
2277 Returns 0 on success.
2278 """
2278 """
2279
2279
2280 r1 = scmutil.revsingle(repo, rev1).node()
2280 r1 = scmutil.revsingle(repo, rev1).node()
2281 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2281 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2282
2282
2283 with repo.wlock():
2283 with repo.wlock():
2284 repo.setparents(r1, r2)
2284 repo.setparents(r1, r2)
2285
2285
2286 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2286 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2287 def debugssl(ui, repo, source=None, **opts):
2287 def debugssl(ui, repo, source=None, **opts):
2288 '''test a secure connection to a server
2288 '''test a secure connection to a server
2289
2289
2290 This builds the certificate chain for the server on Windows, installing the
2290 This builds the certificate chain for the server on Windows, installing the
2291 missing intermediates and trusted root via Windows Update if necessary. It
2291 missing intermediates and trusted root via Windows Update if necessary. It
2292 does nothing on other platforms.
2292 does nothing on other platforms.
2293
2293
2294 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2294 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2295 that server is used. See :hg:`help urls` for more information.
2295 that server is used. See :hg:`help urls` for more information.
2296
2296
2297 If the update succeeds, retry the original operation. Otherwise, the cause
2297 If the update succeeds, retry the original operation. Otherwise, the cause
2298 of the SSL error is likely another issue.
2298 of the SSL error is likely another issue.
2299 '''
2299 '''
2300 if not pycompat.iswindows:
2300 if not pycompat.iswindows:
2301 raise error.Abort(_('certificate chain building is only possible on '
2301 raise error.Abort(_('certificate chain building is only possible on '
2302 'Windows'))
2302 'Windows'))
2303
2303
2304 if not source:
2304 if not source:
2305 if not repo:
2305 if not repo:
2306 raise error.Abort(_("there is no Mercurial repository here, and no "
2306 raise error.Abort(_("there is no Mercurial repository here, and no "
2307 "server specified"))
2307 "server specified"))
2308 source = "default"
2308 source = "default"
2309
2309
2310 source, branches = hg.parseurl(ui.expandpath(source))
2310 source, branches = hg.parseurl(ui.expandpath(source))
2311 url = util.url(source)
2311 url = util.url(source)
2312 addr = None
2312 addr = None
2313
2313
2314 defaultport = {'https': 443, 'ssh': 22}
2314 defaultport = {'https': 443, 'ssh': 22}
2315 if url.scheme in defaultport:
2315 if url.scheme in defaultport:
2316 try:
2316 try:
2317 addr = (url.host, int(url.port or defaultport[url.scheme]))
2317 addr = (url.host, int(url.port or defaultport[url.scheme]))
2318 except ValueError:
2318 except ValueError:
2319 raise error.Abort(_("malformed port number in URL"))
2319 raise error.Abort(_("malformed port number in URL"))
2320 else:
2320 else:
2321 raise error.Abort(_("only https and ssh connections are supported"))
2321 raise error.Abort(_("only https and ssh connections are supported"))
2322
2322
2323 from . import win32
2323 from . import win32
2324
2324
2325 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2325 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2326 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2326 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2327
2327
2328 try:
2328 try:
2329 s.connect(addr)
2329 s.connect(addr)
2330 cert = s.getpeercert(True)
2330 cert = s.getpeercert(True)
2331
2331
2332 ui.status(_('checking the certificate chain for %s\n') % url.host)
2332 ui.status(_('checking the certificate chain for %s\n') % url.host)
2333
2333
2334 complete = win32.checkcertificatechain(cert, build=False)
2334 complete = win32.checkcertificatechain(cert, build=False)
2335
2335
2336 if not complete:
2336 if not complete:
2337 ui.status(_('certificate chain is incomplete, updating... '))
2337 ui.status(_('certificate chain is incomplete, updating... '))
2338
2338
2339 if not win32.checkcertificatechain(cert):
2339 if not win32.checkcertificatechain(cert):
2340 ui.status(_('failed.\n'))
2340 ui.status(_('failed.\n'))
2341 else:
2341 else:
2342 ui.status(_('done.\n'))
2342 ui.status(_('done.\n'))
2343 else:
2343 else:
2344 ui.status(_('full certificate chain is available\n'))
2344 ui.status(_('full certificate chain is available\n'))
2345 finally:
2345 finally:
2346 s.close()
2346 s.close()
2347
2347
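Only the certificate fetch is portable; a minimal sketch of that half (host and port are placeholders, and the ``win32.checkcertificatechain`` step remains Windows-only)::

    import socket
    import ssl

    def fetchpeercert(host, port=443):
        s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
                            cert_reqs=ssl.CERT_NONE, ca_certs=None)
        try:
            s.connect((host, port))
            return s.getpeercert(True)   # DER-encoded leaf certificate bytes
        finally:
            s.close()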
2348 @command('debugsub',
2348 @command('debugsub',
2349 [('r', 'rev', '',
2349 [('r', 'rev', '',
2350 _('revision to check'), _('REV'))],
2350 _('revision to check'), _('REV'))],
2351 _('[-r REV] [REV]'))
2351 _('[-r REV] [REV]'))
2352 def debugsub(ui, repo, rev=None):
2352 def debugsub(ui, repo, rev=None):
2353 ctx = scmutil.revsingle(repo, rev, None)
2353 ctx = scmutil.revsingle(repo, rev, None)
2354 for k, v in sorted(ctx.substate.items()):
2354 for k, v in sorted(ctx.substate.items()):
2355 ui.write(('path %s\n') % k)
2355 ui.write(('path %s\n') % k)
2356 ui.write((' source %s\n') % v[0])
2356 ui.write((' source %s\n') % v[0])
2357 ui.write((' revision %s\n') % v[1])
2357 ui.write((' revision %s\n') % v[1])
2358
2358
2359 @command('debugsuccessorssets',
2359 @command('debugsuccessorssets',
2360 [('', 'closest', False, _('return closest successors sets only'))],
2360 [('', 'closest', False, _('return closest successors sets only'))],
2361 _('[REV]'))
2361 _('[REV]'))
2362 def debugsuccessorssets(ui, repo, *revs, **opts):
2362 def debugsuccessorssets(ui, repo, *revs, **opts):
2363 """show set of successors for revision
2363 """show set of successors for revision
2364
2364
2365 A successors set of changeset A is a consistent group of revisions that
2365 A successors set of changeset A is a consistent group of revisions that
2366 succeed A. It contains non-obsolete changesets only unless closests
2366 succeed A. It contains non-obsolete changesets only unless closests
2367 successors set is set.
2367 successors set is set.
2368
2368
2369 In most cases a changeset A has a single successors set containing a single
2369 In most cases a changeset A has a single successors set containing a single
2370 successor (changeset A replaced by A').
2370 successor (changeset A replaced by A').
2371
2371
2372 A changeset that is made obsolete with no successors are called "pruned".
2372 A changeset that is made obsolete with no successors are called "pruned".
2373 Such changesets have no successors sets at all.
2373 Such changesets have no successors sets at all.
2374
2374
2375 A changeset that has been "split" will have a successors set containing
2375 A changeset that has been "split" will have a successors set containing
2376 more than one successor.
2376 more than one successor.
2377
2377
2378 A changeset that has been rewritten in multiple different ways is called
2378 A changeset that has been rewritten in multiple different ways is called
2379 "divergent". Such changesets have multiple successor sets (each of which
2379 "divergent". Such changesets have multiple successor sets (each of which
2380 may also be split, i.e. have multiple successors).
2380 may also be split, i.e. have multiple successors).
2381
2381
2382 Results are displayed as follows::
2382 Results are displayed as follows::
2383
2383
2384 <rev1>
2384 <rev1>
2385 <successors-1A>
2385 <successors-1A>
2386 <rev2>
2386 <rev2>
2387 <successors-2A>
2387 <successors-2A>
2388 <successors-2B1> <successors-2B2> <successors-2B3>
2388 <successors-2B1> <successors-2B2> <successors-2B3>
2389
2389
2390 Here rev2 has two possible (i.e. divergent) successors sets. The first
2390 Here rev2 has two possible (i.e. divergent) successors sets. The first
2391 holds one element, whereas the second holds three (i.e. the changeset has
2391 holds one element, whereas the second holds three (i.e. the changeset has
2392 been split).
2392 been split).
2393 """
2393 """
2394 # passed to successorssets caching computation from one call to another
2394 # passed to successorssets caching computation from one call to another
2395 cache = {}
2395 cache = {}
2396 ctx2str = bytes
2396 ctx2str = bytes
2397 node2str = short
2397 node2str = short
2398 for rev in scmutil.revrange(repo, revs):
2398 for rev in scmutil.revrange(repo, revs):
2399 ctx = repo[rev]
2399 ctx = repo[rev]
2400 ui.write('%s\n'% ctx2str(ctx))
2400 ui.write('%s\n'% ctx2str(ctx))
2401 for succsset in obsutil.successorssets(repo, ctx.node(),
2401 for succsset in obsutil.successorssets(repo, ctx.node(),
2402 closest=opts[r'closest'],
2402 closest=opts[r'closest'],
2403 cache=cache):
2403 cache=cache):
2404 if succsset:
2404 if succsset:
2405 ui.write(' ')
2405 ui.write(' ')
2406 ui.write(node2str(succsset[0]))
2406 ui.write(node2str(succsset[0]))
2407 for node in succsset[1:]:
2407 for node in succsset[1:]:
2408 ui.write(' ')
2408 ui.write(' ')
2409 ui.write(node2str(node))
2409 ui.write(node2str(node))
2410 ui.write('\n')
2410 ui.write('\n')
2411
2411
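The underlying query is ``obsutil.successorssets``; a sketch of collecting the same data without the formatting (the revision spec is a placeholder)::

    from mercurial import obsutil, scmutil
    from mercurial.node import short

    def successorssetsfor(repo, spec, closest=False):
        cache = {}   # shared across calls, as in the command above
        out = {}
        for rev in scmutil.revrange(repo, [spec]):
            ctx = repo[rev]
            out[bytes(ctx)] = [
                [short(n) for n in succsset]
                for succsset in obsutil.successorssets(repo, ctx.node(),
                                                       closest=closest,
                                                       cache=cache)]
        return out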
2412 @command('debugtemplate',
2412 @command('debugtemplate',
2413 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2413 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2414 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2414 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2415 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2415 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2416 optionalrepo=True)
2416 optionalrepo=True)
2417 def debugtemplate(ui, repo, tmpl, **opts):
2417 def debugtemplate(ui, repo, tmpl, **opts):
2418 """parse and apply a template
2418 """parse and apply a template
2419
2419
2420 If -r/--rev is given, the template is processed as a log template and
2420 If -r/--rev is given, the template is processed as a log template and
2421 applied to the given changesets. Otherwise, it is processed as a generic
2421 applied to the given changesets. Otherwise, it is processed as a generic
2422 template.
2422 template.
2423
2423
2424 Use --verbose to print the parsed tree.
2424 Use --verbose to print the parsed tree.
2425 """
2425 """
2426 revs = None
2426 revs = None
2427 if opts[r'rev']:
2427 if opts[r'rev']:
2428 if repo is None:
2428 if repo is None:
2429 raise error.RepoError(_('there is no Mercurial repository here '
2429 raise error.RepoError(_('there is no Mercurial repository here '
2430 '(.hg not found)'))
2430 '(.hg not found)'))
2431 revs = scmutil.revrange(repo, opts[r'rev'])
2431 revs = scmutil.revrange(repo, opts[r'rev'])
2432
2432
2433 props = {}
2433 props = {}
2434 for d in opts[r'define']:
2434 for d in opts[r'define']:
2435 try:
2435 try:
2436 k, v = (e.strip() for e in d.split('=', 1))
2436 k, v = (e.strip() for e in d.split('=', 1))
2437 if not k or k == 'ui':
2437 if not k or k == 'ui':
2438 raise ValueError
2438 raise ValueError
2439 props[k] = v
2439 props[k] = v
2440 except ValueError:
2440 except ValueError:
2441 raise error.Abort(_('malformed keyword definition: %s') % d)
2441 raise error.Abort(_('malformed keyword definition: %s') % d)
2442
2442
2443 if ui.verbose:
2443 if ui.verbose:
2444 aliases = ui.configitems('templatealias')
2444 aliases = ui.configitems('templatealias')
2445 tree = templater.parse(tmpl)
2445 tree = templater.parse(tmpl)
2446 ui.note(templater.prettyformat(tree), '\n')
2446 ui.note(templater.prettyformat(tree), '\n')
2447 newtree = templater.expandaliases(tree, aliases)
2447 newtree = templater.expandaliases(tree, aliases)
2448 if newtree != tree:
2448 if newtree != tree:
2449 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2449 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2450
2450
2451 if revs is None:
2451 if revs is None:
2452 tres = formatter.templateresources(ui, repo)
2452 tres = formatter.templateresources(ui, repo)
2453 t = formatter.maketemplater(ui, tmpl, resources=tres)
2453 t = formatter.maketemplater(ui, tmpl, resources=tres)
2454 ui.write(t.render(props))
2454 ui.write(t.render(props))
2455 else:
2455 else:
2456 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2456 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2457 for r in revs:
2457 for r in revs:
2458 displayer.show(repo[r], **pycompat.strkwargs(props))
2458 displayer.show(repo[r], **pycompat.strkwargs(props))
2459 displayer.close()
2459 displayer.close()
2460
2460
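The two rendering paths above correspond to a generic ``formatter`` templater and a log templater; a sketch of the generic path (the template text and keyword are examples)::

    from mercurial import formatter

    def rendertemplate(ui, repo, tmpl, **props):
        # Mirrors the revs-is-None branch of debugtemplate.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        return t.render(props)

    # e.g. rendertemplate(ui, repo, '{foo}\n', foo='bar') -> 'bar\n'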
2461 @command('debugupdatecaches', [])
2461 @command('debugupdatecaches', [])
2462 def debugupdatecaches(ui, repo, *pats, **opts):
2462 def debugupdatecaches(ui, repo, *pats, **opts):
2463 """warm all known caches in the repository"""
2463 """warm all known caches in the repository"""
2464 with repo.wlock(), repo.lock():
2464 with repo.wlock(), repo.lock():
2465 repo.updatecaches()
2465 repo.updatecaches()
2466
2466
2467 @command('debugupgraderepo', [
2467 @command('debugupgraderepo', [
2468 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2468 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2469 ('', 'run', False, _('performs an upgrade')),
2469 ('', 'run', False, _('performs an upgrade')),
2470 ])
2470 ])
2471 def debugupgraderepo(ui, repo, run=False, optimize=None):
2471 def debugupgraderepo(ui, repo, run=False, optimize=None):
2472 """upgrade a repository to use different features
2472 """upgrade a repository to use different features
2473
2473
2474 If no arguments are specified, the repository is evaluated for upgrade
2474 If no arguments are specified, the repository is evaluated for upgrade
2475 and a list of problems and potential optimizations is printed.
2475 and a list of problems and potential optimizations is printed.
2476
2476
2477 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2477 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2478 can be influenced via additional arguments. More details will be provided
2478 can be influenced via additional arguments. More details will be provided
2479 by the command output when run without ``--run``.
2479 by the command output when run without ``--run``.
2480
2480
2481 During the upgrade, the repository will be locked and no writes will be
2481 During the upgrade, the repository will be locked and no writes will be
2482 allowed.
2482 allowed.
2483
2483
2484 At the end of the upgrade, the repository may not be readable while new
2484 At the end of the upgrade, the repository may not be readable while new
2485 repository data is swapped in. This window will be as long as it takes to
2485 repository data is swapped in. This window will be as long as it takes to
2486 rename some directories inside the ``.hg`` directory. On most machines, this
2486 rename some directories inside the ``.hg`` directory. On most machines, this
2487 should complete almost instantaneously and the chances of a consumer being
2487 should complete almost instantaneously and the chances of a consumer being
2488 unable to access the repository should be low.
2488 unable to access the repository should be low.
2489 """
2489 """
2490 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2490 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2491
2491
2492 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2492 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2493 inferrepo=True)
2493 inferrepo=True)
2494 def debugwalk(ui, repo, *pats, **opts):
2494 def debugwalk(ui, repo, *pats, **opts):
2495 """show how files match on given patterns"""
2495 """show how files match on given patterns"""
2496 opts = pycompat.byteskwargs(opts)
2496 opts = pycompat.byteskwargs(opts)
2497 m = scmutil.match(repo[None], pats, opts)
2497 m = scmutil.match(repo[None], pats, opts)
2498 ui.write(('matcher: %r\n' % m))
2498 ui.write(('matcher: %r\n' % m))
2499 items = list(repo[None].walk(m))
2499 items = list(repo[None].walk(m))
2500 if not items:
2500 if not items:
2501 return
2501 return
2502 f = lambda fn: fn
2502 f = lambda fn: fn
2503 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2503 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2504 f = lambda fn: util.normpath(fn)
2504 f = lambda fn: util.normpath(fn)
2505 fmt = 'f %%-%ds %%-%ds %%s' % (
2505 fmt = 'f %%-%ds %%-%ds %%s' % (
2506 max([len(abs) for abs in items]),
2506 max([len(abs) for abs in items]),
2507 max([len(m.rel(abs)) for abs in items]))
2507 max([len(m.rel(abs)) for abs in items]))
2508 for abs in items:
2508 for abs in items:
2509 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2509 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2510 ui.write("%s\n" % line.rstrip())
2510 ui.write("%s\n" % line.rstrip())
2511
2511
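The walk itself is just a matcher applied to the working-directory context; a stripped-down sketch of the same traversal (the pattern list is an example)::

    from mercurial import scmutil

    def walkedpaths(repo, pats, opts=None):
        ctx = repo[None]                      # working directory context
        m = scmutil.match(ctx, pats, opts or {})
        # (absolute path, path relative to cwd, exact match?) per file
        return [(abs, m.rel(abs), m.exact(abs)) for abs in ctx.walk(m)]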
2512 @command('debugwireargs',
2512 @command('debugwireargs',
2513 [('', 'three', '', 'three'),
2513 [('', 'three', '', 'three'),
2514 ('', 'four', '', 'four'),
2514 ('', 'four', '', 'four'),
2515 ('', 'five', '', 'five'),
2515 ('', 'five', '', 'five'),
2516 ] + cmdutil.remoteopts,
2516 ] + cmdutil.remoteopts,
2517 _('REPO [OPTIONS]... [ONE [TWO]]'),
2517 _('REPO [OPTIONS]... [ONE [TWO]]'),
2518 norepo=True)
2518 norepo=True)
2519 def debugwireargs(ui, repopath, *vals, **opts):
2519 def debugwireargs(ui, repopath, *vals, **opts):
2520 opts = pycompat.byteskwargs(opts)
2520 opts = pycompat.byteskwargs(opts)
2521 repo = hg.peer(ui, opts, repopath)
2521 repo = hg.peer(ui, opts, repopath)
2522 for opt in cmdutil.remoteopts:
2522 for opt in cmdutil.remoteopts:
2523 del opts[opt[1]]
2523 del opts[opt[1]]
2524 args = {}
2524 args = {}
2525 for k, v in opts.iteritems():
2525 for k, v in opts.iteritems():
2526 if v:
2526 if v:
2527 args[k] = v
2527 args[k] = v
2528 args = pycompat.strkwargs(args)
2528 args = pycompat.strkwargs(args)
2529 # run twice to check that we don't mess up the stream for the next command
2529 # run twice to check that we don't mess up the stream for the next command
2530 res1 = repo.debugwireargs(*vals, **args)
2530 res1 = repo.debugwireargs(*vals, **args)
2531 res2 = repo.debugwireargs(*vals, **args)
2531 res2 = repo.debugwireargs(*vals, **args)
2532 ui.write("%s\n" % res1)
2532 ui.write("%s\n" % res1)
2533 if res1 != res2:
2533 if res1 != res2:
2534 ui.warn("%s\n" % res2)
2534 ui.warn("%s\n" % res2)
2535
2535
def _parsewirelangblocks(fh):
    activeaction = None
    blocklines = []

    for line in fh:
        line = line.rstrip()
        if not line:
            continue

        if line.startswith(b'#'):
            continue

        if not line.startswith(' '):
            # New block. Flush previous one.
            if activeaction:
                yield activeaction, blocklines

            activeaction = line
            blocklines = []
            continue

        # Else we start with an indent.

        if not activeaction:
            raise error.Abort(_('indented line outside of block'))

        blocklines.append(line)

    # Flush last block.
    if activeaction:
        yield activeaction, blocklines

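# Illustrative sketch only, not part of the original module: a tiny helper
# showing how _parsewirelangblocks() groups a script into (action, lines)
# pairs. The helper name and the sample input are invented for illustration
# (Python 2 bytes/str semantics assumed, as in the rest of this module).
def _examplewirelangparse():
    import io
    sample = io.BytesIO(b'command listkeys\n'
                        b'    namespace bookmarks\n'
                        b'readavailable\n')
    # Expected result:
    # [('command listkeys', ['    namespace bookmarks']),
    #  ('readavailable', [])]
    return list(_parsewirelangblocks(sample))
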
@command('debugwireproto',
    [
        ('', 'localssh', False, _('start an SSH server for this repo')),
        ('', 'peer', '', _('construct a specific version of the peer')),
        ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
    ] + cmdutil.remoteopts,
    _('[REPO]'),
    optionalrepo=True)
def debugwireproto(ui, repo, **opts):
2577 """send wire protocol commands to a server
2577 """send wire protocol commands to a server
2578
2578
2579 This command can be used to issue wire protocol commands to remote
2579 This command can be used to issue wire protocol commands to remote
2580 peers and to debug the raw data being exchanged.
2580 peers and to debug the raw data being exchanged.
2581
2581
2582 ``--localssh`` will start an SSH server against the current repository
2582 ``--localssh`` will start an SSH server against the current repository
2583 and connect to that. By default, the connection will perform a handshake
2583 and connect to that. By default, the connection will perform a handshake
2584 and establish an appropriate peer instance.
2584 and establish an appropriate peer instance.
2585
2585
2586 ``--peer`` can be used to bypass the handshake protocol and construct a
2586 ``--peer`` can be used to bypass the handshake protocol and construct a
2587 peer instance using the specified class type. Valid values are ``raw``,
2587 peer instance using the specified class type. Valid values are ``raw``,
2588 ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending raw data
2588 ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending raw data
2589 payloads and don't support higher-level command actions.
2589 payloads and don't support higher-level command actions.
2590
2590
2591 ``--noreadstderr`` can be used to disable automatic reading from stderr
2591 ``--noreadstderr`` can be used to disable automatic reading from stderr
2592 of the peer (for SSH connections only). Disabling automatic reading of
2592 of the peer (for SSH connections only). Disabling automatic reading of
2593 stderr is useful for making output more deterministic.
2593 stderr is useful for making output more deterministic.
2594
2594
2595 Commands are issued via a mini language which is specified via stdin.
2595 Commands are issued via a mini language which is specified via stdin.
2596 The language consists of individual actions to perform. An action is
2596 The language consists of individual actions to perform. An action is
2597 defined by a block. A block is defined as a line with no leading
2597 defined by a block. A block is defined as a line with no leading
2598 space followed by 0 or more lines with leading space. Blocks are
2598 space followed by 0 or more lines with leading space. Blocks are
2599 effectively a high-level command with additional metadata.
2599 effectively a high-level command with additional metadata.
2600
2600
2601 Lines beginning with ``#`` are ignored.
2601 Lines beginning with ``#`` are ignored.
2602
2602
2603 The following sections denote available actions.
2603 The following sections denote available actions.
2604
2604
2605 raw
2605 raw
2606 ---
2606 ---
2607
2607
2608 Send raw data to the server.
2608 Send raw data to the server.
2609
2609
2610 The block payload contains the raw data to send as one atomic send
2610 The block payload contains the raw data to send as one atomic send
2611 operation. The data may not actually be delivered in a single system
2611 operation. The data may not actually be delivered in a single system
2612 call: it depends on the abilities of the transport being used.
2612 call: it depends on the abilities of the transport being used.
2613
2613
2614 Each line in the block is de-indented and concatenated. Then, that
2614 Each line in the block is de-indented and concatenated. Then, that
2615 value is evaluated as a Python b'' literal. This allows the use of
2615 value is evaluated as a Python b'' literal. This allows the use of
2616 backslash escaping, etc.
2616 backslash escaping, etc.
2617
2617
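    For example, the following block would send the bytes ``hello`` followed
    by a newline, written with backslash escaping (illustrative only; the
    exact payload to send depends on the server and transport)::

        raw
            hello\\n
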
    raw+
    ----

    Behaves like ``raw`` except flushes output afterwards.

    command <X>
    -----------

    Send a request to run a named command, whose name follows the ``command``
    string.

    Arguments to the command are defined as lines in this block. The format of
    each line is ``<key> <value>``. e.g.::

        command listkeys
            namespace bookmarks

    Values are interpreted as Python b'' literals. This allows encoding
    special byte sequences via backslash escaping.

    The following arguments have special meaning:

    ``PUSHFILE``
        When defined, the *push* mechanism of the peer will be used instead
        of the static request-response mechanism and the content of the
        file specified in the value of this argument will be sent as the
        command payload.

        This can be used to submit a local bundle file to the remote.

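    An illustrative example (the bundle file name is a placeholder) is
    feeding a local bundle to the ``unbundle`` command::

        command unbundle
            PUSHFILE mybundle.hg
            heads 666f726365

    (``666f726365`` is the hex encoding of ``force``.)
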
    batchbegin
    ----------

    Instruct the peer to begin a batched send.

    All ``command`` blocks are queued for execution until the next
    ``batchsubmit`` block.

    batchsubmit
    -----------

    Submit previously queued ``command`` blocks as a batch request.

    This action MUST be paired with a ``batchbegin`` action.

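    For instance (illustrative), two ``listkeys`` requests can be combined
    into a single round trip::

        batchbegin

        command listkeys
            namespace bookmarks

        command listkeys
            namespace phases

        batchsubmit
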
    close
    -----

    Close the connection to the server.

    flush
    -----

    Flush data written to the server.

    readavailable
    -------------

    Read all available data from the server.

    If the connection to the server encompasses multiple pipes, we poll both
    pipes and read available data.

    readline
    --------

    Read a line of output from the server. If there are multiple output
    pipes, reads only the main pipe.
    """
    opts = pycompat.byteskwargs(opts)

    if opts['localssh'] and not repo:
        raise error.Abort(_('--localssh requires a repository'))

    if opts['peer'] and opts['peer'] not in ('raw', 'ssh1', 'ssh2'):
        raise error.Abort(_('invalid value for --peer'),
                          hint=_('valid values are "raw", "ssh1", and "ssh2"'))

    if ui.interactive():
        ui.write(_('(waiting for commands on stdin)\n'))

    blocks = list(_parsewirelangblocks(ui.fin))

    proc = None

    if opts['localssh']:
        # We start the SSH server in its own process so there is process
        # separation. This prevents a whole class of potential bugs around
        # shared state from interfering with server operation.
        args = util.hgcmd() + [
            '-R', repo.root,
            'debugserve', '--sshstdio',
        ]
        proc = subprocess.Popen(args, stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                                bufsize=0)

        stdin = proc.stdin
        stdout = proc.stdout
        stderr = proc.stderr

        # We turn the pipes into observers so we can log I/O.
        if ui.verbose or opts['peer'] == 'raw':
            stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
                                               logdata=True)
            stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
                                                logdata=True)
            stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
                                                logdata=True)

        # --localssh also implies the peer connection settings.

        url = 'ssh://localserver'
        autoreadstderr = not opts['noreadstderr']

        if opts['peer'] == 'ssh1':
            ui.write(_('creating ssh peer for wire protocol version 1\n'))
            peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
                                     None, autoreadstderr=autoreadstderr)
        elif opts['peer'] == 'ssh2':
            ui.write(_('creating ssh peer for wire protocol version 2\n'))
            peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
                                     None, autoreadstderr=autoreadstderr)
        elif opts['peer'] == 'raw':
            ui.write(_('using raw connection to peer\n'))
            peer = None
        else:
            ui.write(_('creating ssh peer from handshake results\n'))
            peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
                                    autoreadstderr=autoreadstderr)

    else:
        raise error.Abort(_('only --localssh is currently supported'))

    batchedcommands = None

    # Now perform actions based on the parsed wire language instructions.
    for action, lines in blocks:
        if action in ('raw', 'raw+'):
            # Concatenate the data together.
            data = ''.join(l.lstrip() for l in lines)
            data = util.unescapestr(data)
            stdin.write(data)

            if action == 'raw+':
                stdin.flush()
        elif action == 'flush':
            stdin.flush()
        elif action.startswith('command'):
            if not peer:
                raise error.Abort(_('cannot send commands unless peer instance '
                                    'is available'))

            command = action.split(' ', 1)[1]

            args = {}
            for line in lines:
                # We need to allow empty values.
                fields = line.lstrip().split(' ', 1)
                if len(fields) == 1:
                    key = fields[0]
                    value = ''
                else:
                    key, value = fields

                args[key] = util.unescapestr(value)

            if batchedcommands is not None:
                batchedcommands.append((command, args))
                continue

            ui.status(_('sending %s command\n') % command)

            if 'PUSHFILE' in args:
                with open(args['PUSHFILE'], r'rb') as fh:
                    del args['PUSHFILE']
                    res, output = peer._callpush(command, fh,
                                                 **pycompat.strkwargs(args))
                    ui.status(_('result: %s\n') % util.escapedata(res))
                    ui.status(_('remote output: %s\n') %
                              util.escapedata(output))
            else:
                res = peer._call(command, **pycompat.strkwargs(args))
                ui.status(_('response: %s\n') % util.escapedata(res))

        elif action == 'batchbegin':
            if batchedcommands is not None:
                raise error.Abort(_('nested batchbegin not allowed'))

            batchedcommands = []
        elif action == 'batchsubmit':
            # There is a batching API we could go through. But it would be
            # difficult to normalize requests into function calls. It is easier
            # to bypass this layer and normalize to commands + args.
            ui.status(_('sending batch with %d sub-commands\n') %
                      len(batchedcommands))
            for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
                ui.status(_('response #%d: %s\n') % (i, util.escapedata(chunk)))

            batchedcommands = None
        elif action == 'close':
            peer.close()
        elif action == 'readavailable':
            fds = [stdout.fileno(), stderr.fileno()]
            try:
                act = util.poll(fds)
            except NotImplementedError:
                # Polling is not supported on this platform yet; assume all
                # pipes have data.
                act = fds

            if stdout.fileno() in act:
                util.readpipe(stdout)
            if stderr.fileno() in act:
                util.readpipe(stderr)
        elif action == 'readline':
            stdout.readline()
        else:
            raise error.Abort(_('unknown action: %s') % action)

    if batchedcommands is not None:
        raise error.Abort(_('unclosed "batchbegin" request'))

    if peer:
        peer.close()

    if proc:
        proc.kill()
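
# Illustrative sketch only, not part of the original module: one way to drive
# the command above non-interactively, piping a wire-language script to
# ``hg debugwireproto --localssh``. The helper name and repository path are
# invented for illustration; error handling is omitted for brevity.
def _examplewireprotosession(repopath):
    script = (b'command listkeys\n'
              b'    namespace bookmarks\n'
              b'close\n')
    proc = subprocess.Popen(['hg', '-R', repopath,
                             'debugwireproto', '--localssh'],
                            stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    out, _err = proc.communicate(script)
    return out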