debugdeltachain: output information about sparse read if enabled
Paul Morelle
r35050:5cbbef8d default
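The change below extends 'hg debugdeltachain' so that, when sparse reading is
active, each revision row also reports how much data has to be read from disk
to reconstruct its delta chain. As a minimal sketch (assuming a Mercurial
build that includes this change), the feature is switched on through the
configuration named in the new docstring:

    [experimental]
    sparse-read = True

With that setting, the command gains three extra columns and the template
keywords readsize, largestblock and readdensity.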
@@ -1,2328 +1,2364 @@
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import difflib
12 import difflib
13 import errno
13 import errno
14 import operator
14 import operator
15 import os
15 import os
16 import random
16 import random
17 import socket
17 import socket
18 import ssl
18 import ssl
19 import string
19 import string
20 import sys
20 import sys
21 import tempfile
21 import tempfile
22 import time
22 import time
23
23
24 from .i18n import _
24 from .i18n import _
25 from .node import (
25 from .node import (
26 bin,
26 bin,
27 hex,
27 hex,
28 nullhex,
28 nullhex,
29 nullid,
29 nullid,
30 nullrev,
30 nullrev,
31 short,
31 short,
32 )
32 )
33 from . import (
33 from . import (
34 bundle2,
34 bundle2,
35 changegroup,
35 changegroup,
36 cmdutil,
36 cmdutil,
37 color,
37 color,
38 context,
38 context,
39 dagparser,
39 dagparser,
40 dagutil,
40 dagutil,
41 encoding,
41 encoding,
42 error,
42 error,
43 exchange,
43 exchange,
44 extensions,
44 extensions,
45 filemerge,
45 filemerge,
46 fileset,
46 fileset,
47 formatter,
47 formatter,
48 hg,
48 hg,
49 localrepo,
49 localrepo,
50 lock as lockmod,
50 lock as lockmod,
51 merge as mergemod,
51 merge as mergemod,
52 obsolete,
52 obsolete,
53 obsutil,
53 obsutil,
54 phases,
54 phases,
55 policy,
55 policy,
56 pvec,
56 pvec,
57 pycompat,
57 pycompat,
58 registrar,
58 registrar,
59 repair,
59 repair,
60 revlog,
60 revlog,
61 revset,
61 revset,
62 revsetlang,
62 revsetlang,
63 scmutil,
63 scmutil,
64 setdiscovery,
64 setdiscovery,
65 simplemerge,
65 simplemerge,
66 smartset,
66 smartset,
67 sslutil,
67 sslutil,
68 streamclone,
68 streamclone,
69 templater,
69 templater,
70 treediscovery,
70 treediscovery,
71 upgrade,
71 upgrade,
72 util,
72 util,
73 vfs as vfsmod,
73 vfs as vfsmod,
74 )
74 )
75
75
76 release = lockmod.release
76 release = lockmod.release
77
77
78 command = registrar.command()
78 command = registrar.command()
79
79
80 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
80 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
81 def debugancestor(ui, repo, *args):
81 def debugancestor(ui, repo, *args):
82 """find the ancestor revision of two revisions in a given index"""
82 """find the ancestor revision of two revisions in a given index"""
83 if len(args) == 3:
83 if len(args) == 3:
84 index, rev1, rev2 = args
84 index, rev1, rev2 = args
85 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
85 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
86 lookup = r.lookup
86 lookup = r.lookup
87 elif len(args) == 2:
87 elif len(args) == 2:
88 if not repo:
88 if not repo:
89 raise error.Abort(_('there is no Mercurial repository here '
89 raise error.Abort(_('there is no Mercurial repository here '
90 '(.hg not found)'))
90 '(.hg not found)'))
91 rev1, rev2 = args
91 rev1, rev2 = args
92 r = repo.changelog
92 r = repo.changelog
93 lookup = repo.lookup
93 lookup = repo.lookup
94 else:
94 else:
95 raise error.Abort(_('either two or three arguments required'))
95 raise error.Abort(_('either two or three arguments required'))
96 a = r.ancestor(lookup(rev1), lookup(rev2))
96 a = r.ancestor(lookup(rev1), lookup(rev2))
97 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
97 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
98
98
99 @command('debugapplystreamclonebundle', [], 'FILE')
99 @command('debugapplystreamclonebundle', [], 'FILE')
100 def debugapplystreamclonebundle(ui, repo, fname):
100 def debugapplystreamclonebundle(ui, repo, fname):
101 """apply a stream clone bundle file"""
101 """apply a stream clone bundle file"""
102 f = hg.openpath(ui, fname)
102 f = hg.openpath(ui, fname)
103 gen = exchange.readbundle(ui, f, fname)
103 gen = exchange.readbundle(ui, f, fname)
104 gen.apply(repo)
104 gen.apply(repo)
105
105
106 @command('debugbuilddag',
106 @command('debugbuilddag',
107 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
107 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
108 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
108 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
109 ('n', 'new-file', None, _('add new file at each rev'))],
109 ('n', 'new-file', None, _('add new file at each rev'))],
110 _('[OPTION]... [TEXT]'))
110 _('[OPTION]... [TEXT]'))
111 def debugbuilddag(ui, repo, text=None,
111 def debugbuilddag(ui, repo, text=None,
112 mergeable_file=False,
112 mergeable_file=False,
113 overwritten_file=False,
113 overwritten_file=False,
114 new_file=False):
114 new_file=False):
115 """builds a repo with a given DAG from scratch in the current empty repo
115 """builds a repo with a given DAG from scratch in the current empty repo
116
116
117 The description of the DAG is read from stdin if not given on the
117 The description of the DAG is read from stdin if not given on the
118 command line.
118 command line.
119
119
120 Elements:
120 Elements:
121
121
122 - "+n" is a linear run of n nodes based on the current default parent
122 - "+n" is a linear run of n nodes based on the current default parent
123 - "." is a single node based on the current default parent
123 - "." is a single node based on the current default parent
124 - "$" resets the default parent to null (implied at the start);
124 - "$" resets the default parent to null (implied at the start);
125 otherwise the default parent is always the last node created
125 otherwise the default parent is always the last node created
126 - "<p" sets the default parent to the backref p
126 - "<p" sets the default parent to the backref p
127 - "*p" is a fork at parent p, which is a backref
127 - "*p" is a fork at parent p, which is a backref
128 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
128 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
129 - "/p2" is a merge of the preceding node and p2
129 - "/p2" is a merge of the preceding node and p2
130 - ":tag" defines a local tag for the preceding node
130 - ":tag" defines a local tag for the preceding node
131 - "@branch" sets the named branch for subsequent nodes
131 - "@branch" sets the named branch for subsequent nodes
132 - "#...\\n" is a comment up to the end of the line
132 - "#...\\n" is a comment up to the end of the line
133
133
134 Whitespace between the above elements is ignored.
134 Whitespace between the above elements is ignored.
135
135
136 A backref is either
136 A backref is either
137
137
138 - a number n, which references the node curr-n, where curr is the current
138 - a number n, which references the node curr-n, where curr is the current
139 node, or
139 node, or
140 - the name of a local tag you placed earlier using ":tag", or
140 - the name of a local tag you placed earlier using ":tag", or
141 - empty to denote the default parent.
141 - empty to denote the default parent.
142
142
143 All string-valued elements are either strictly alphanumeric, or must
143 All string-valued elements are either strictly alphanumeric, or must
144 be enclosed in double quotes ("..."), with "\\" as escape character.
144 be enclosed in double quotes ("..."), with "\\" as escape character.
145 """
145 """
146
146
147 if text is None:
147 if text is None:
148 ui.status(_("reading DAG from stdin\n"))
148 ui.status(_("reading DAG from stdin\n"))
149 text = ui.fin.read()
149 text = ui.fin.read()
150
150
151 cl = repo.changelog
151 cl = repo.changelog
152 if len(cl) > 0:
152 if len(cl) > 0:
153 raise error.Abort(_('repository is not empty'))
153 raise error.Abort(_('repository is not empty'))
154
154
155 # determine number of revs in DAG
155 # determine number of revs in DAG
156 total = 0
156 total = 0
157 for type, data in dagparser.parsedag(text):
157 for type, data in dagparser.parsedag(text):
158 if type == 'n':
158 if type == 'n':
159 total += 1
159 total += 1
160
160
161 if mergeable_file:
161 if mergeable_file:
162 linesperrev = 2
162 linesperrev = 2
163 # make a file with k lines per rev
163 # make a file with k lines per rev
164 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
164 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
165 initialmergedlines.append("")
165 initialmergedlines.append("")
166
166
167 tags = []
167 tags = []
168
168
169 wlock = lock = tr = None
169 wlock = lock = tr = None
170 try:
170 try:
171 wlock = repo.wlock()
171 wlock = repo.wlock()
172 lock = repo.lock()
172 lock = repo.lock()
173 tr = repo.transaction("builddag")
173 tr = repo.transaction("builddag")
174
174
175 at = -1
175 at = -1
176 atbranch = 'default'
176 atbranch = 'default'
177 nodeids = []
177 nodeids = []
178 id = 0
178 id = 0
179 ui.progress(_('building'), id, unit=_('revisions'), total=total)
179 ui.progress(_('building'), id, unit=_('revisions'), total=total)
180 for type, data in dagparser.parsedag(text):
180 for type, data in dagparser.parsedag(text):
181 if type == 'n':
181 if type == 'n':
182 ui.note(('node %s\n' % str(data)))
182 ui.note(('node %s\n' % str(data)))
183 id, ps = data
183 id, ps = data
184
184
185 files = []
185 files = []
186 fctxs = {}
186 fctxs = {}
187
187
188 p2 = None
188 p2 = None
189 if mergeable_file:
189 if mergeable_file:
190 fn = "mf"
190 fn = "mf"
191 p1 = repo[ps[0]]
191 p1 = repo[ps[0]]
192 if len(ps) > 1:
192 if len(ps) > 1:
193 p2 = repo[ps[1]]
193 p2 = repo[ps[1]]
194 pa = p1.ancestor(p2)
194 pa = p1.ancestor(p2)
195 base, local, other = [x[fn].data() for x in (pa, p1,
195 base, local, other = [x[fn].data() for x in (pa, p1,
196 p2)]
196 p2)]
197 m3 = simplemerge.Merge3Text(base, local, other)
197 m3 = simplemerge.Merge3Text(base, local, other)
198 ml = [l.strip() for l in m3.merge_lines()]
198 ml = [l.strip() for l in m3.merge_lines()]
199 ml.append("")
199 ml.append("")
200 elif at > 0:
200 elif at > 0:
201 ml = p1[fn].data().split("\n")
201 ml = p1[fn].data().split("\n")
202 else:
202 else:
203 ml = initialmergedlines
203 ml = initialmergedlines
204 ml[id * linesperrev] += " r%i" % id
204 ml[id * linesperrev] += " r%i" % id
205 mergedtext = "\n".join(ml)
205 mergedtext = "\n".join(ml)
206 files.append(fn)
206 files.append(fn)
207 fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
207 fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
208
208
209 if overwritten_file:
209 if overwritten_file:
210 fn = "of"
210 fn = "of"
211 files.append(fn)
211 files.append(fn)
212 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
212 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
213
213
214 if new_file:
214 if new_file:
215 fn = "nf%i" % id
215 fn = "nf%i" % id
216 files.append(fn)
216 files.append(fn)
217 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
217 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
218 if len(ps) > 1:
218 if len(ps) > 1:
219 if not p2:
219 if not p2:
220 p2 = repo[ps[1]]
220 p2 = repo[ps[1]]
221 for fn in p2:
221 for fn in p2:
222 if fn.startswith("nf"):
222 if fn.startswith("nf"):
223 files.append(fn)
223 files.append(fn)
224 fctxs[fn] = p2[fn]
224 fctxs[fn] = p2[fn]
225
225
226 def fctxfn(repo, cx, path):
226 def fctxfn(repo, cx, path):
227 return fctxs.get(path)
227 return fctxs.get(path)
228
228
229 if len(ps) == 0 or ps[0] < 0:
229 if len(ps) == 0 or ps[0] < 0:
230 pars = [None, None]
230 pars = [None, None]
231 elif len(ps) == 1:
231 elif len(ps) == 1:
232 pars = [nodeids[ps[0]], None]
232 pars = [nodeids[ps[0]], None]
233 else:
233 else:
234 pars = [nodeids[p] for p in ps]
234 pars = [nodeids[p] for p in ps]
235 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
235 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
236 date=(id, 0),
236 date=(id, 0),
237 user="debugbuilddag",
237 user="debugbuilddag",
238 extra={'branch': atbranch})
238 extra={'branch': atbranch})
239 nodeid = repo.commitctx(cx)
239 nodeid = repo.commitctx(cx)
240 nodeids.append(nodeid)
240 nodeids.append(nodeid)
241 at = id
241 at = id
242 elif type == 'l':
242 elif type == 'l':
243 id, name = data
243 id, name = data
244 ui.note(('tag %s\n' % name))
244 ui.note(('tag %s\n' % name))
245 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
245 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
246 elif type == 'a':
246 elif type == 'a':
247 ui.note(('branch %s\n' % data))
247 ui.note(('branch %s\n' % data))
248 atbranch = data
248 atbranch = data
249 ui.progress(_('building'), id, unit=_('revisions'), total=total)
249 ui.progress(_('building'), id, unit=_('revisions'), total=total)
250 tr.close()
250 tr.close()
251
251
252 if tags:
252 if tags:
253 repo.vfs.write("localtags", "".join(tags))
253 repo.vfs.write("localtags", "".join(tags))
254 finally:
254 finally:
255 ui.progress(_('building'), None)
255 ui.progress(_('building'), None)
256 release(tr, lock, wlock)
256 release(tr, lock, wlock)
257
257
258 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
258 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
259 indent_string = ' ' * indent
259 indent_string = ' ' * indent
260 if all:
260 if all:
261 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
261 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
262 % indent_string)
262 % indent_string)
263
263
264 def showchunks(named):
264 def showchunks(named):
265 ui.write("\n%s%s\n" % (indent_string, named))
265 ui.write("\n%s%s\n" % (indent_string, named))
266 for deltadata in gen.deltaiter():
266 for deltadata in gen.deltaiter():
267 node, p1, p2, cs, deltabase, delta, flags = deltadata
267 node, p1, p2, cs, deltabase, delta, flags = deltadata
268 ui.write("%s%s %s %s %s %s %s\n" %
268 ui.write("%s%s %s %s %s %s %s\n" %
269 (indent_string, hex(node), hex(p1), hex(p2),
269 (indent_string, hex(node), hex(p1), hex(p2),
270 hex(cs), hex(deltabase), len(delta)))
270 hex(cs), hex(deltabase), len(delta)))
271
271
272 chunkdata = gen.changelogheader()
272 chunkdata = gen.changelogheader()
273 showchunks("changelog")
273 showchunks("changelog")
274 chunkdata = gen.manifestheader()
274 chunkdata = gen.manifestheader()
275 showchunks("manifest")
275 showchunks("manifest")
276 for chunkdata in iter(gen.filelogheader, {}):
276 for chunkdata in iter(gen.filelogheader, {}):
277 fname = chunkdata['filename']
277 fname = chunkdata['filename']
278 showchunks(fname)
278 showchunks(fname)
279 else:
279 else:
280 if isinstance(gen, bundle2.unbundle20):
280 if isinstance(gen, bundle2.unbundle20):
281 raise error.Abort(_('use debugbundle2 for this file'))
281 raise error.Abort(_('use debugbundle2 for this file'))
282 chunkdata = gen.changelogheader()
282 chunkdata = gen.changelogheader()
283 for deltadata in gen.deltaiter():
283 for deltadata in gen.deltaiter():
284 node, p1, p2, cs, deltabase, delta, flags = deltadata
284 node, p1, p2, cs, deltabase, delta, flags = deltadata
285 ui.write("%s%s\n" % (indent_string, hex(node)))
285 ui.write("%s%s\n" % (indent_string, hex(node)))
286
286
287 def _debugobsmarkers(ui, part, indent=0, **opts):
287 def _debugobsmarkers(ui, part, indent=0, **opts):
288 """display version and markers contained in 'data'"""
288 """display version and markers contained in 'data'"""
289 opts = pycompat.byteskwargs(opts)
289 opts = pycompat.byteskwargs(opts)
290 data = part.read()
290 data = part.read()
291 indent_string = ' ' * indent
291 indent_string = ' ' * indent
292 try:
292 try:
293 version, markers = obsolete._readmarkers(data)
293 version, markers = obsolete._readmarkers(data)
294 except error.UnknownVersion as exc:
294 except error.UnknownVersion as exc:
295 msg = "%sunsupported version: %s (%d bytes)\n"
295 msg = "%sunsupported version: %s (%d bytes)\n"
296 msg %= indent_string, exc.version, len(data)
296 msg %= indent_string, exc.version, len(data)
297 ui.write(msg)
297 ui.write(msg)
298 else:
298 else:
299 msg = "%sversion: %s (%d bytes)\n"
299 msg = "%sversion: %s (%d bytes)\n"
300 msg %= indent_string, version, len(data)
300 msg %= indent_string, version, len(data)
301 ui.write(msg)
301 ui.write(msg)
302 fm = ui.formatter('debugobsolete', opts)
302 fm = ui.formatter('debugobsolete', opts)
303 for rawmarker in sorted(markers):
303 for rawmarker in sorted(markers):
304 m = obsutil.marker(None, rawmarker)
304 m = obsutil.marker(None, rawmarker)
305 fm.startitem()
305 fm.startitem()
306 fm.plain(indent_string)
306 fm.plain(indent_string)
307 cmdutil.showmarker(fm, m)
307 cmdutil.showmarker(fm, m)
308 fm.end()
308 fm.end()
309
309
310 def _debugphaseheads(ui, data, indent=0):
310 def _debugphaseheads(ui, data, indent=0):
311 """display version and markers contained in 'data'"""
311 """display version and markers contained in 'data'"""
312 indent_string = ' ' * indent
312 indent_string = ' ' * indent
313 headsbyphase = phases.binarydecode(data)
313 headsbyphase = phases.binarydecode(data)
314 for phase in phases.allphases:
314 for phase in phases.allphases:
315 for head in headsbyphase[phase]:
315 for head in headsbyphase[phase]:
316 ui.write(indent_string)
316 ui.write(indent_string)
317 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
317 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
318
318
319 def _quasirepr(thing):
319 def _quasirepr(thing):
320 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
320 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
321 return '{%s}' % (
321 return '{%s}' % (
322 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
322 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
323 return pycompat.bytestr(repr(thing))
323 return pycompat.bytestr(repr(thing))
324
324
325 def _debugbundle2(ui, gen, all=None, **opts):
325 def _debugbundle2(ui, gen, all=None, **opts):
326 """lists the contents of a bundle2"""
326 """lists the contents of a bundle2"""
327 if not isinstance(gen, bundle2.unbundle20):
327 if not isinstance(gen, bundle2.unbundle20):
328 raise error.Abort(_('not a bundle2 file'))
328 raise error.Abort(_('not a bundle2 file'))
329 ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
329 ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
330 parttypes = opts.get(r'part_type', [])
330 parttypes = opts.get(r'part_type', [])
331 for part in gen.iterparts():
331 for part in gen.iterparts():
332 if parttypes and part.type not in parttypes:
332 if parttypes and part.type not in parttypes:
333 continue
333 continue
334 ui.write('%s -- %s\n' % (part.type, _quasirepr(part.params)))
334 ui.write('%s -- %s\n' % (part.type, _quasirepr(part.params)))
335 if part.type == 'changegroup':
335 if part.type == 'changegroup':
336 version = part.params.get('version', '01')
336 version = part.params.get('version', '01')
337 cg = changegroup.getunbundler(version, part, 'UN')
337 cg = changegroup.getunbundler(version, part, 'UN')
338 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
338 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
339 if part.type == 'obsmarkers':
339 if part.type == 'obsmarkers':
340 _debugobsmarkers(ui, part, indent=4, **opts)
340 _debugobsmarkers(ui, part, indent=4, **opts)
341 if part.type == 'phase-heads':
341 if part.type == 'phase-heads':
342 _debugphaseheads(ui, part, indent=4)
342 _debugphaseheads(ui, part, indent=4)
343
343
344 @command('debugbundle',
344 @command('debugbundle',
345 [('a', 'all', None, _('show all details')),
345 [('a', 'all', None, _('show all details')),
346 ('', 'part-type', [], _('show only the named part type')),
346 ('', 'part-type', [], _('show only the named part type')),
347 ('', 'spec', None, _('print the bundlespec of the bundle'))],
347 ('', 'spec', None, _('print the bundlespec of the bundle'))],
348 _('FILE'),
348 _('FILE'),
349 norepo=True)
349 norepo=True)
350 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
350 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
351 """lists the contents of a bundle"""
351 """lists the contents of a bundle"""
352 with hg.openpath(ui, bundlepath) as f:
352 with hg.openpath(ui, bundlepath) as f:
353 if spec:
353 if spec:
354 spec = exchange.getbundlespec(ui, f)
354 spec = exchange.getbundlespec(ui, f)
355 ui.write('%s\n' % spec)
355 ui.write('%s\n' % spec)
356 return
356 return
357
357
358 gen = exchange.readbundle(ui, f, bundlepath)
358 gen = exchange.readbundle(ui, f, bundlepath)
359 if isinstance(gen, bundle2.unbundle20):
359 if isinstance(gen, bundle2.unbundle20):
360 return _debugbundle2(ui, gen, all=all, **opts)
360 return _debugbundle2(ui, gen, all=all, **opts)
361 _debugchangegroup(ui, gen, all=all, **opts)
361 _debugchangegroup(ui, gen, all=all, **opts)
362
362
363 @command('debugcapabilities',
363 @command('debugcapabilities',
364 [], _('PATH'),
364 [], _('PATH'),
365 norepo=True)
365 norepo=True)
366 def debugcapabilities(ui, path, **opts):
366 def debugcapabilities(ui, path, **opts):
367 """lists the capabilities of a remote peer"""
367 """lists the capabilities of a remote peer"""
368 peer = hg.peer(ui, opts, path)
368 peer = hg.peer(ui, opts, path)
369 caps = peer.capabilities()
369 caps = peer.capabilities()
370 ui.write(('Main capabilities:\n'))
370 ui.write(('Main capabilities:\n'))
371 for c in sorted(caps):
371 for c in sorted(caps):
372 ui.write((' %s\n') % c)
372 ui.write((' %s\n') % c)
373 b2caps = bundle2.bundle2caps(peer)
373 b2caps = bundle2.bundle2caps(peer)
374 if b2caps:
374 if b2caps:
375 ui.write(('Bundle2 capabilities:\n'))
375 ui.write(('Bundle2 capabilities:\n'))
376 for key, values in sorted(b2caps.iteritems()):
376 for key, values in sorted(b2caps.iteritems()):
377 ui.write((' %s\n') % key)
377 ui.write((' %s\n') % key)
378 for v in values:
378 for v in values:
379 ui.write((' %s\n') % v)
379 ui.write((' %s\n') % v)
380
380
381 @command('debugcheckstate', [], '')
381 @command('debugcheckstate', [], '')
382 def debugcheckstate(ui, repo):
382 def debugcheckstate(ui, repo):
383 """validate the correctness of the current dirstate"""
383 """validate the correctness of the current dirstate"""
384 parent1, parent2 = repo.dirstate.parents()
384 parent1, parent2 = repo.dirstate.parents()
385 m1 = repo[parent1].manifest()
385 m1 = repo[parent1].manifest()
386 m2 = repo[parent2].manifest()
386 m2 = repo[parent2].manifest()
387 errors = 0
387 errors = 0
388 for f in repo.dirstate:
388 for f in repo.dirstate:
389 state = repo.dirstate[f]
389 state = repo.dirstate[f]
390 if state in "nr" and f not in m1:
390 if state in "nr" and f not in m1:
391 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
391 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
392 errors += 1
392 errors += 1
393 if state in "a" and f in m1:
393 if state in "a" and f in m1:
394 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
394 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
395 errors += 1
395 errors += 1
396 if state in "m" and f not in m1 and f not in m2:
396 if state in "m" and f not in m1 and f not in m2:
397 ui.warn(_("%s in state %s, but not in either manifest\n") %
397 ui.warn(_("%s in state %s, but not in either manifest\n") %
398 (f, state))
398 (f, state))
399 errors += 1
399 errors += 1
400 for f in m1:
400 for f in m1:
401 state = repo.dirstate[f]
401 state = repo.dirstate[f]
402 if state not in "nrm":
402 if state not in "nrm":
403 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
403 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
404 errors += 1
404 errors += 1
405 if errors:
405 if errors:
406 errstr = _(".hg/dirstate inconsistent with current parent's manifest")
406 errstr = _(".hg/dirstate inconsistent with current parent's manifest")
407 raise error.Abort(errstr)
407 raise error.Abort(errstr)
408
408
409 @command('debugcolor',
409 @command('debugcolor',
410 [('', 'style', None, _('show all configured styles'))],
410 [('', 'style', None, _('show all configured styles'))],
411 'hg debugcolor')
411 'hg debugcolor')
412 def debugcolor(ui, repo, **opts):
412 def debugcolor(ui, repo, **opts):
413 """show available color, effects or style"""
413 """show available color, effects or style"""
414 ui.write(('color mode: %s\n') % ui._colormode)
414 ui.write(('color mode: %s\n') % ui._colormode)
415 if opts.get(r'style'):
415 if opts.get(r'style'):
416 return _debugdisplaystyle(ui)
416 return _debugdisplaystyle(ui)
417 else:
417 else:
418 return _debugdisplaycolor(ui)
418 return _debugdisplaycolor(ui)
419
419
420 def _debugdisplaycolor(ui):
420 def _debugdisplaycolor(ui):
421 ui = ui.copy()
421 ui = ui.copy()
422 ui._styles.clear()
422 ui._styles.clear()
423 for effect in color._activeeffects(ui).keys():
423 for effect in color._activeeffects(ui).keys():
424 ui._styles[effect] = effect
424 ui._styles[effect] = effect
425 if ui._terminfoparams:
425 if ui._terminfoparams:
426 for k, v in ui.configitems('color'):
426 for k, v in ui.configitems('color'):
427 if k.startswith('color.'):
427 if k.startswith('color.'):
428 ui._styles[k] = k[6:]
428 ui._styles[k] = k[6:]
429 elif k.startswith('terminfo.'):
429 elif k.startswith('terminfo.'):
430 ui._styles[k] = k[9:]
430 ui._styles[k] = k[9:]
431 ui.write(_('available colors:\n'))
431 ui.write(_('available colors:\n'))
432 # sort labels with a '_' after the others to group the '_background' entries
432 # sort labels with a '_' after the others to group the '_background' entries
433 items = sorted(ui._styles.items(),
433 items = sorted(ui._styles.items(),
434 key=lambda i: ('_' in i[0], i[0], i[1]))
434 key=lambda i: ('_' in i[0], i[0], i[1]))
435 for colorname, label in items:
435 for colorname, label in items:
436 ui.write(('%s\n') % colorname, label=label)
436 ui.write(('%s\n') % colorname, label=label)
437
437
438 def _debugdisplaystyle(ui):
438 def _debugdisplaystyle(ui):
439 ui.write(_('available style:\n'))
439 ui.write(_('available style:\n'))
440 width = max(len(s) for s in ui._styles)
440 width = max(len(s) for s in ui._styles)
441 for label, effects in sorted(ui._styles.items()):
441 for label, effects in sorted(ui._styles.items()):
442 ui.write('%s' % label, label=label)
442 ui.write('%s' % label, label=label)
443 if effects:
443 if effects:
444 # 50
444 # 50
445 ui.write(': ')
445 ui.write(': ')
446 ui.write(' ' * (max(0, width - len(label))))
446 ui.write(' ' * (max(0, width - len(label))))
447 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
447 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
448 ui.write('\n')
448 ui.write('\n')
449
449
450 @command('debugcreatestreamclonebundle', [], 'FILE')
450 @command('debugcreatestreamclonebundle', [], 'FILE')
451 def debugcreatestreamclonebundle(ui, repo, fname):
451 def debugcreatestreamclonebundle(ui, repo, fname):
452 """create a stream clone bundle file
452 """create a stream clone bundle file
453
453
454 Stream bundles are special bundles that are essentially archives of
454 Stream bundles are special bundles that are essentially archives of
455 revlog files. They are commonly used for cloning very quickly.
455 revlog files. They are commonly used for cloning very quickly.
456 """
456 """
457 # TODO we may want to turn this into an abort when this functionality
457 # TODO we may want to turn this into an abort when this functionality
458 # is moved into `hg bundle`.
458 # is moved into `hg bundle`.
459 if phases.hassecret(repo):
459 if phases.hassecret(repo):
460 ui.warn(_('(warning: stream clone bundle will contain secret '
460 ui.warn(_('(warning: stream clone bundle will contain secret '
461 'revisions)\n'))
461 'revisions)\n'))
462
462
463 requirements, gen = streamclone.generatebundlev1(repo)
463 requirements, gen = streamclone.generatebundlev1(repo)
464 changegroup.writechunks(ui, gen, fname)
464 changegroup.writechunks(ui, gen, fname)
465
465
466 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
466 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
467
467
468 @command('debugdag',
468 @command('debugdag',
469 [('t', 'tags', None, _('use tags as labels')),
469 [('t', 'tags', None, _('use tags as labels')),
470 ('b', 'branches', None, _('annotate with branch names')),
470 ('b', 'branches', None, _('annotate with branch names')),
471 ('', 'dots', None, _('use dots for runs')),
471 ('', 'dots', None, _('use dots for runs')),
472 ('s', 'spaces', None, _('separate elements by spaces'))],
472 ('s', 'spaces', None, _('separate elements by spaces'))],
473 _('[OPTION]... [FILE [REV]...]'),
473 _('[OPTION]... [FILE [REV]...]'),
474 optionalrepo=True)
474 optionalrepo=True)
475 def debugdag(ui, repo, file_=None, *revs, **opts):
475 def debugdag(ui, repo, file_=None, *revs, **opts):
476 """format the changelog or an index DAG as a concise textual description
476 """format the changelog or an index DAG as a concise textual description
477
477
478 If you pass a revlog index, the revlog's DAG is emitted. If you list
478 If you pass a revlog index, the revlog's DAG is emitted. If you list
479 revision numbers, they get labeled in the output as rN.
479 revision numbers, they get labeled in the output as rN.
480
480
481 Otherwise, the changelog DAG of the current repo is emitted.
481 Otherwise, the changelog DAG of the current repo is emitted.
482 """
482 """
483 spaces = opts.get(r'spaces')
483 spaces = opts.get(r'spaces')
484 dots = opts.get(r'dots')
484 dots = opts.get(r'dots')
485 if file_:
485 if file_:
486 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
486 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
487 file_)
487 file_)
488 revs = set((int(r) for r in revs))
488 revs = set((int(r) for r in revs))
489 def events():
489 def events():
490 for r in rlog:
490 for r in rlog:
491 yield 'n', (r, list(p for p in rlog.parentrevs(r)
491 yield 'n', (r, list(p for p in rlog.parentrevs(r)
492 if p != -1))
492 if p != -1))
493 if r in revs:
493 if r in revs:
494 yield 'l', (r, "r%i" % r)
494 yield 'l', (r, "r%i" % r)
495 elif repo:
495 elif repo:
496 cl = repo.changelog
496 cl = repo.changelog
497 tags = opts.get(r'tags')
497 tags = opts.get(r'tags')
498 branches = opts.get(r'branches')
498 branches = opts.get(r'branches')
499 if tags:
499 if tags:
500 labels = {}
500 labels = {}
501 for l, n in repo.tags().items():
501 for l, n in repo.tags().items():
502 labels.setdefault(cl.rev(n), []).append(l)
502 labels.setdefault(cl.rev(n), []).append(l)
503 def events():
503 def events():
504 b = "default"
504 b = "default"
505 for r in cl:
505 for r in cl:
506 if branches:
506 if branches:
507 newb = cl.read(cl.node(r))[5]['branch']
507 newb = cl.read(cl.node(r))[5]['branch']
508 if newb != b:
508 if newb != b:
509 yield 'a', newb
509 yield 'a', newb
510 b = newb
510 b = newb
511 yield 'n', (r, list(p for p in cl.parentrevs(r)
511 yield 'n', (r, list(p for p in cl.parentrevs(r)
512 if p != -1))
512 if p != -1))
513 if tags:
513 if tags:
514 ls = labels.get(r)
514 ls = labels.get(r)
515 if ls:
515 if ls:
516 for l in ls:
516 for l in ls:
517 yield 'l', (r, l)
517 yield 'l', (r, l)
518 else:
518 else:
519 raise error.Abort(_('need repo for changelog dag'))
519 raise error.Abort(_('need repo for changelog dag'))
520
520
521 for line in dagparser.dagtextlines(events(),
521 for line in dagparser.dagtextlines(events(),
522 addspaces=spaces,
522 addspaces=spaces,
523 wraplabels=True,
523 wraplabels=True,
524 wrapannotations=True,
524 wrapannotations=True,
525 wrapnonlinear=dots,
525 wrapnonlinear=dots,
526 usedots=dots,
526 usedots=dots,
527 maxlinewidth=70):
527 maxlinewidth=70):
528 ui.write(line)
528 ui.write(line)
529 ui.write("\n")
529 ui.write("\n")
530
530
531 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
531 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
532 def debugdata(ui, repo, file_, rev=None, **opts):
532 def debugdata(ui, repo, file_, rev=None, **opts):
533 """dump the contents of a data file revision"""
533 """dump the contents of a data file revision"""
534 opts = pycompat.byteskwargs(opts)
534 opts = pycompat.byteskwargs(opts)
535 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
535 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
536 if rev is not None:
536 if rev is not None:
537 raise error.CommandError('debugdata', _('invalid arguments'))
537 raise error.CommandError('debugdata', _('invalid arguments'))
538 file_, rev = None, file_
538 file_, rev = None, file_
539 elif rev is None:
539 elif rev is None:
540 raise error.CommandError('debugdata', _('invalid arguments'))
540 raise error.CommandError('debugdata', _('invalid arguments'))
541 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
541 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
542 try:
542 try:
543 ui.write(r.revision(r.lookup(rev), raw=True))
543 ui.write(r.revision(r.lookup(rev), raw=True))
544 except KeyError:
544 except KeyError:
545 raise error.Abort(_('invalid revision identifier %s') % rev)
545 raise error.Abort(_('invalid revision identifier %s') % rev)
546
546
547 @command('debugdate',
547 @command('debugdate',
548 [('e', 'extended', None, _('try extended date formats'))],
548 [('e', 'extended', None, _('try extended date formats'))],
549 _('[-e] DATE [RANGE]'),
549 _('[-e] DATE [RANGE]'),
550 norepo=True, optionalrepo=True)
550 norepo=True, optionalrepo=True)
551 def debugdate(ui, date, range=None, **opts):
551 def debugdate(ui, date, range=None, **opts):
552 """parse and display a date"""
552 """parse and display a date"""
553 if opts[r"extended"]:
553 if opts[r"extended"]:
554 d = util.parsedate(date, util.extendeddateformats)
554 d = util.parsedate(date, util.extendeddateformats)
555 else:
555 else:
556 d = util.parsedate(date)
556 d = util.parsedate(date)
557 ui.write(("internal: %s %s\n") % d)
557 ui.write(("internal: %s %s\n") % d)
558 ui.write(("standard: %s\n") % util.datestr(d))
558 ui.write(("standard: %s\n") % util.datestr(d))
559 if range:
559 if range:
560 m = util.matchdate(range)
560 m = util.matchdate(range)
561 ui.write(("match: %s\n") % m(d[0]))
561 ui.write(("match: %s\n") % m(d[0]))
562
562
563 @command('debugdeltachain',
563 @command('debugdeltachain',
564 cmdutil.debugrevlogopts + cmdutil.formatteropts,
564 cmdutil.debugrevlogopts + cmdutil.formatteropts,
565 _('-c|-m|FILE'),
565 _('-c|-m|FILE'),
566 optionalrepo=True)
566 optionalrepo=True)
567 def debugdeltachain(ui, repo, file_=None, **opts):
567 def debugdeltachain(ui, repo, file_=None, **opts):
568 """dump information about delta chains in a revlog
568 """dump information about delta chains in a revlog
569
569
570 Output can be templatized. Available template keywords are:
570 Output can be templatized. Available template keywords are:
571
571
572 :``rev``: revision number
572 :``rev``: revision number
573 :``chainid``: delta chain identifier (numbered by unique base)
573 :``chainid``: delta chain identifier (numbered by unique base)
574 :``chainlen``: delta chain length to this revision
574 :``chainlen``: delta chain length to this revision
575 :``prevrev``: previous revision in delta chain
575 :``prevrev``: previous revision in delta chain
576 :``deltatype``: role of delta / how it was computed
576 :``deltatype``: role of delta / how it was computed
577 :``compsize``: compressed size of revision
577 :``compsize``: compressed size of revision
578 :``uncompsize``: uncompressed size of revision
578 :``uncompsize``: uncompressed size of revision
579 :``chainsize``: total size of compressed revisions in chain
579 :``chainsize``: total size of compressed revisions in chain
580 :``chainratio``: total chain size divided by uncompressed revision size
580 :``chainratio``: total chain size divided by uncompressed revision size
581 (new delta chains typically start at ratio 2.00)
581 (new delta chains typically start at ratio 2.00)
582 :``lindist``: linear distance from base revision in delta chain to end
582 :``lindist``: linear distance from base revision in delta chain to end
583 of this revision
583 of this revision
584 :``extradist``: total size of revisions not part of this delta chain from
584 :``extradist``: total size of revisions not part of this delta chain from
585 base of delta chain to end of this revision; a measurement
585 base of delta chain to end of this revision; a measurement
586 of how much extra data we need to read/seek across to read
586 of how much extra data we need to read/seek across to read
587 the delta chain for this revision
587 the delta chain for this revision
588 :``extraratio``: extradist divided by chainsize; another representation of
588 :``extraratio``: extradist divided by chainsize; another representation of
589 how much unrelated data is needed to load this delta chain
589 how much unrelated data is needed to load this delta chain
590
591 If the repository is configured to use sparse reads, additional keywords
592 are available:
593
594 :``readsize``: total size of data read from the disk for a revision
595 (sum of the sizes of all the blocks)
596 :``largestblock``: size of the largest block of data read from the disk
597 :``readdensity``: density of useful bytes in the data read from the disk
598
599 Sparse reads can be enabled with experimental.sparse-read = True
590 """
600 """
591 opts = pycompat.byteskwargs(opts)
601 opts = pycompat.byteskwargs(opts)
592 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
602 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
593 index = r.index
603 index = r.index
594 generaldelta = r.version & revlog.FLAG_GENERALDELTA
604 generaldelta = r.version & revlog.FLAG_GENERALDELTA
605 withsparseread = getattr(r, '_withsparseread', False)
595
606
596 def revinfo(rev):
607 def revinfo(rev):
597 e = index[rev]
608 e = index[rev]
598 compsize = e[1]
609 compsize = e[1]
599 uncompsize = e[2]
610 uncompsize = e[2]
600 chainsize = 0
611 chainsize = 0
601
612
602 if generaldelta:
613 if generaldelta:
603 if e[3] == e[5]:
614 if e[3] == e[5]:
604 deltatype = 'p1'
615 deltatype = 'p1'
605 elif e[3] == e[6]:
616 elif e[3] == e[6]:
606 deltatype = 'p2'
617 deltatype = 'p2'
607 elif e[3] == rev - 1:
618 elif e[3] == rev - 1:
608 deltatype = 'prev'
619 deltatype = 'prev'
609 elif e[3] == rev:
620 elif e[3] == rev:
610 deltatype = 'base'
621 deltatype = 'base'
611 else:
622 else:
612 deltatype = 'other'
623 deltatype = 'other'
613 else:
624 else:
614 if e[3] == rev:
625 if e[3] == rev:
615 deltatype = 'base'
626 deltatype = 'base'
616 else:
627 else:
617 deltatype = 'prev'
628 deltatype = 'prev'
618
629
619 chain = r._deltachain(rev)[0]
630 chain = r._deltachain(rev)[0]
620 for iterrev in chain:
631 for iterrev in chain:
621 e = index[iterrev]
632 e = index[iterrev]
622 chainsize += e[1]
633 chainsize += e[1]
623
634
624 return compsize, uncompsize, deltatype, chain, chainsize
635 return compsize, uncompsize, deltatype, chain, chainsize
625
636
626 fm = ui.formatter('debugdeltachain', opts)
637 fm = ui.formatter('debugdeltachain', opts)
627
638
628 fm.plain(' rev chain# chainlen prev delta '
639 fm.plain(' rev chain# chainlen prev delta '
629 'size rawsize chainsize ratio lindist extradist '
640 'size rawsize chainsize ratio lindist extradist '
630 'extraratio\n')
641 'extraratio')
642 if withsparseread:
643 fm.plain(' readsize largestblk rddensity')
644 fm.plain('\n')
631
645
632 chainbases = {}
646 chainbases = {}
633 for rev in r:
647 for rev in r:
634 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
648 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
635 chainbase = chain[0]
649 chainbase = chain[0]
636 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
650 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
637 basestart = r.start(chainbase)
651 start = r.start
638 revstart = r.start(rev)
652 length = r.length
653 basestart = start(chainbase)
654 revstart = start(rev)
639 lineardist = revstart + comp - basestart
655 lineardist = revstart + comp - basestart
640 extradist = lineardist - chainsize
656 extradist = lineardist - chainsize
641 try:
657 try:
642 prevrev = chain[-2]
658 prevrev = chain[-2]
643 except IndexError:
659 except IndexError:
644 prevrev = -1
660 prevrev = -1
645
661
646 chainratio = float(chainsize) / float(uncomp)
662 chainratio = float(chainsize) / float(uncomp)
647 extraratio = float(extradist) / float(chainsize)
663 extraratio = float(extradist) / float(chainsize)
648
664
649 fm.startitem()
665 fm.startitem()
650 fm.write('rev chainid chainlen prevrev deltatype compsize '
666 fm.write('rev chainid chainlen prevrev deltatype compsize '
651 'uncompsize chainsize chainratio lindist extradist '
667 'uncompsize chainsize chainratio lindist extradist '
652 'extraratio',
668 'extraratio',
653 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f\n',
669 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
654 rev, chainid, len(chain), prevrev, deltatype, comp,
670 rev, chainid, len(chain), prevrev, deltatype, comp,
655 uncomp, chainsize, chainratio, lineardist, extradist,
671 uncomp, chainsize, chainratio, lineardist, extradist,
656 extraratio,
672 extraratio,
657 rev=rev, chainid=chainid, chainlen=len(chain),
673 rev=rev, chainid=chainid, chainlen=len(chain),
658 prevrev=prevrev, deltatype=deltatype, compsize=comp,
674 prevrev=prevrev, deltatype=deltatype, compsize=comp,
659 uncompsize=uncomp, chainsize=chainsize,
675 uncompsize=uncomp, chainsize=chainsize,
660 chainratio=chainratio, lindist=lineardist,
676 chainratio=chainratio, lindist=lineardist,
661 extradist=extradist, extraratio=extraratio)
677 extradist=extradist, extraratio=extraratio)
678 if withsparseread:
679 readsize = 0
680 largestblock = 0
681 for revschunk in revlog._slicechunk(r, chain):
682 blkend = start(revschunk[-1]) + length(revschunk[-1])
683 blksize = blkend - start(revschunk[0])
684
685 readsize += blksize
686 if largestblock < blksize:
687 largestblock = blksize
688
689 readdensity = float(chainsize) / float(readsize)
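# For example (illustrative numbers only): a chain whose compressed deltas
# total 1200 bytes, but whose enclosing read blocks span 1600 bytes on disk,
# has a readdensity of 1200.0 / 1600.0 = 0.75.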
690
691 fm.write('readsize largestblock readdensity',
692 ' %10d %10d %9.5f',
693 readsize, largestblock, readdensity,
694 readsize=readsize, largestblock=largestblock,
695 readdensity=readdensity)
696
697 fm.plain('\n')
662
698
663 fm.end()
699 fm.end()
664
700
665 @command('debugdirstate|debugstate',
701 @command('debugdirstate|debugstate',
666 [('', 'nodates', None, _('do not display the saved mtime')),
702 [('', 'nodates', None, _('do not display the saved mtime')),
667 ('', 'datesort', None, _('sort by saved mtime'))],
703 ('', 'datesort', None, _('sort by saved mtime'))],
668 _('[OPTION]...'))
704 _('[OPTION]...'))
669 def debugstate(ui, repo, **opts):
705 def debugstate(ui, repo, **opts):
670 """show the contents of the current dirstate"""
706 """show the contents of the current dirstate"""
671
707
672 nodates = opts.get(r'nodates')
708 nodates = opts.get(r'nodates')
673 datesort = opts.get(r'datesort')
709 datesort = opts.get(r'datesort')
674
710
675 timestr = ""
711 timestr = ""
676 if datesort:
712 if datesort:
677 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
713 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
678 else:
714 else:
679 keyfunc = None # sort by filename
715 keyfunc = None # sort by filename
680 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
716 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
681 if ent[3] == -1:
717 if ent[3] == -1:
682 timestr = 'unset '
718 timestr = 'unset '
683 elif nodates:
719 elif nodates:
684 timestr = 'set '
720 timestr = 'set '
685 else:
721 else:
686 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
722 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
687 time.localtime(ent[3]))
723 time.localtime(ent[3]))
688 if ent[1] & 0o20000:
724 if ent[1] & 0o20000:
689 mode = 'lnk'
725 mode = 'lnk'
690 else:
726 else:
691 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
727 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
692 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
728 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
693 for f in repo.dirstate.copies():
729 for f in repo.dirstate.copies():
694 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
730 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
695
731
696 @command('debugdiscovery',
732 @command('debugdiscovery',
697 [('', 'old', None, _('use old-style discovery')),
733 [('', 'old', None, _('use old-style discovery')),
698 ('', 'nonheads', None,
734 ('', 'nonheads', None,
699 _('use old-style discovery with non-heads included')),
735 _('use old-style discovery with non-heads included')),
700 ] + cmdutil.remoteopts,
736 ] + cmdutil.remoteopts,
701 _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
737 _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
702 def debugdiscovery(ui, repo, remoteurl="default", **opts):
738 def debugdiscovery(ui, repo, remoteurl="default", **opts):
703 """runs the changeset discovery protocol in isolation"""
739 """runs the changeset discovery protocol in isolation"""
704 opts = pycompat.byteskwargs(opts)
740 opts = pycompat.byteskwargs(opts)
705 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
741 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
706 opts.get('branch'))
742 opts.get('branch'))
707 remote = hg.peer(repo, opts, remoteurl)
743 remote = hg.peer(repo, opts, remoteurl)
708 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
744 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
709
745
710 # make sure tests are repeatable
746 # make sure tests are repeatable
711 random.seed(12323)
747 random.seed(12323)
712
748
713 def doit(localheads, remoteheads, remote=remote):
749 def doit(localheads, remoteheads, remote=remote):
714 if opts.get('old'):
750 if opts.get('old'):
715 if localheads:
751 if localheads:
716 raise error.Abort('cannot use localheads with old style '
752 raise error.Abort('cannot use localheads with old style '
717 'discovery')
753 'discovery')
718 if not util.safehasattr(remote, 'branches'):
754 if not util.safehasattr(remote, 'branches'):
719 # enable in-client legacy support
755 # enable in-client legacy support
720 remote = localrepo.locallegacypeer(remote.local())
756 remote = localrepo.locallegacypeer(remote.local())
721 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
757 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
722 force=True)
758 force=True)
723 common = set(common)
759 common = set(common)
724 if not opts.get('nonheads'):
760 if not opts.get('nonheads'):
725 ui.write(("unpruned common: %s\n") %
761 ui.write(("unpruned common: %s\n") %
726 " ".join(sorted(short(n) for n in common)))
762 " ".join(sorted(short(n) for n in common)))
727 dag = dagutil.revlogdag(repo.changelog)
763 dag = dagutil.revlogdag(repo.changelog)
728 all = dag.ancestorset(dag.internalizeall(common))
764 all = dag.ancestorset(dag.internalizeall(common))
729 common = dag.externalizeall(dag.headsetofconnecteds(all))
765 common = dag.externalizeall(dag.headsetofconnecteds(all))
730 else:
766 else:
731 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
767 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
732 common = set(common)
768 common = set(common)
733 rheads = set(hds)
769 rheads = set(hds)
734 lheads = set(repo.heads())
770 lheads = set(repo.heads())
735 ui.write(("common heads: %s\n") %
771 ui.write(("common heads: %s\n") %
736 " ".join(sorted(short(n) for n in common)))
772 " ".join(sorted(short(n) for n in common)))
737 if lheads <= common:
773 if lheads <= common:
738 ui.write(("local is subset\n"))
774 ui.write(("local is subset\n"))
739 elif rheads <= common:
775 elif rheads <= common:
740 ui.write(("remote is subset\n"))
776 ui.write(("remote is subset\n"))
741
777
742 serverlogs = opts.get('serverlog')
778 serverlogs = opts.get('serverlog')
743 if serverlogs:
779 if serverlogs:
744 for filename in serverlogs:
780 for filename in serverlogs:
745 with open(filename, 'r') as logfile:
781 with open(filename, 'r') as logfile:
746 line = logfile.readline()
782 line = logfile.readline()
747 while line:
783 while line:
748 parts = line.strip().split(';')
784 parts = line.strip().split(';')
749 op = parts[1]
785 op = parts[1]
750 if op == 'cg':
786 if op == 'cg':
751 pass
787 pass
752 elif op == 'cgss':
788 elif op == 'cgss':
753 doit(parts[2].split(' '), parts[3].split(' '))
789 doit(parts[2].split(' '), parts[3].split(' '))
754 elif op == 'unb':
790 elif op == 'unb':
755 doit(parts[3].split(' '), parts[2].split(' '))
791 doit(parts[3].split(' '), parts[2].split(' '))
756 line = logfile.readline()
792 line = logfile.readline()
757 else:
793 else:
758 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
794 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
759 opts.get('remote_head'))
795 opts.get('remote_head'))
760 localrevs = opts.get('local_head')
796 localrevs = opts.get('local_head')
761 doit(localrevs, remoterevs)
797 doit(localrevs, remoterevs)
762
798
763 @command('debugextensions', cmdutil.formatteropts, [], norepo=True)
799 @command('debugextensions', cmdutil.formatteropts, [], norepo=True)
764 def debugextensions(ui, **opts):
800 def debugextensions(ui, **opts):
765 '''show information about active extensions'''
801 '''show information about active extensions'''
766 opts = pycompat.byteskwargs(opts)
802 opts = pycompat.byteskwargs(opts)
767 exts = extensions.extensions(ui)
803 exts = extensions.extensions(ui)
768 hgver = util.version()
804 hgver = util.version()
769 fm = ui.formatter('debugextensions', opts)
805 fm = ui.formatter('debugextensions', opts)
770 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
806 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
771 isinternal = extensions.ismoduleinternal(extmod)
807 isinternal = extensions.ismoduleinternal(extmod)
772 extsource = pycompat.fsencode(extmod.__file__)
808 extsource = pycompat.fsencode(extmod.__file__)
773 if isinternal:
809 if isinternal:
774 exttestedwith = [] # never expose magic string to users
810 exttestedwith = [] # never expose magic string to users
775 else:
811 else:
776 exttestedwith = getattr(extmod, 'testedwith', '').split()
812 exttestedwith = getattr(extmod, 'testedwith', '').split()
777 extbuglink = getattr(extmod, 'buglink', None)
813 extbuglink = getattr(extmod, 'buglink', None)
778
814
779 fm.startitem()
815 fm.startitem()
780
816
781 if ui.quiet or ui.verbose:
817 if ui.quiet or ui.verbose:
782 fm.write('name', '%s\n', extname)
818 fm.write('name', '%s\n', extname)
783 else:
819 else:
784 fm.write('name', '%s', extname)
820 fm.write('name', '%s', extname)
785 if isinternal or hgver in exttestedwith:
821 if isinternal or hgver in exttestedwith:
786 fm.plain('\n')
822 fm.plain('\n')
787 elif not exttestedwith:
823 elif not exttestedwith:
788 fm.plain(_(' (untested!)\n'))
824 fm.plain(_(' (untested!)\n'))
789 else:
825 else:
790 lasttestedversion = exttestedwith[-1]
826 lasttestedversion = exttestedwith[-1]
791 fm.plain(' (%s!)\n' % lasttestedversion)
827 fm.plain(' (%s!)\n' % lasttestedversion)
792
828
793 fm.condwrite(ui.verbose and extsource, 'source',
829 fm.condwrite(ui.verbose and extsource, 'source',
794 _(' location: %s\n'), extsource or "")
830 _(' location: %s\n'), extsource or "")
795
831
796 if ui.verbose:
832 if ui.verbose:
797 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
833 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
798 fm.data(bundled=isinternal)
834 fm.data(bundled=isinternal)
799
835
800 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
836 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
801 _(' tested with: %s\n'),
837 _(' tested with: %s\n'),
802 fm.formatlist(exttestedwith, name='ver'))
838 fm.formatlist(exttestedwith, name='ver'))
803
839
804 fm.condwrite(ui.verbose and extbuglink, 'buglink',
840 fm.condwrite(ui.verbose and extbuglink, 'buglink',
805 _(' bug reporting: %s\n'), extbuglink or "")
841 _(' bug reporting: %s\n'), extbuglink or "")
806
842
807 fm.end()
843 fm.end()
808
844
809 @command('debugfileset',
845 @command('debugfileset',
810 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
846 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
811 _('[-r REV] FILESPEC'))
847 _('[-r REV] FILESPEC'))
812 def debugfileset(ui, repo, expr, **opts):
848 def debugfileset(ui, repo, expr, **opts):
813 '''parse and apply a fileset specification'''
849 '''parse and apply a fileset specification'''
814 ctx = scmutil.revsingle(repo, opts.get(r'rev'), None)
850 ctx = scmutil.revsingle(repo, opts.get(r'rev'), None)
815 if ui.verbose:
851 if ui.verbose:
816 tree = fileset.parse(expr)
852 tree = fileset.parse(expr)
817 ui.note(fileset.prettyformat(tree), "\n")
853 ui.note(fileset.prettyformat(tree), "\n")
818
854
819 for f in ctx.getfileset(expr):
855 for f in ctx.getfileset(expr):
820 ui.write("%s\n" % f)
856 ui.write("%s\n" % f)
821
857
822 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
858 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
823 def debugfsinfo(ui, path="."):
859 def debugfsinfo(ui, path="."):
824 """show information detected about current filesystem"""
860 """show information detected about current filesystem"""
825 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
861 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
826 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
862 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
827 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
863 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
828 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
864 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
829 casesensitive = '(unknown)'
865 casesensitive = '(unknown)'
830 try:
866 try:
831 with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
867 with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
832 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
868 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
833 except OSError:
869 except OSError:
834 pass
870 pass
835 ui.write(('case-sensitive: %s\n') % casesensitive)
871 ui.write(('case-sensitive: %s\n') % casesensitive)
836
872
@command('debuggetbundle',
    [('H', 'head', [], _('id of head node'), _('ID')),
     ('C', 'common', [], _('id of common node'), _('ID')),
     ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
    _('REPO FILE [-H|-C ID]...'),
    norepo=True)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('getbundle'):
        raise error.Abort("getbundle() not supported by target repository")
    args = {}
    if common:
        args[r'common'] = [bin(s) for s in common]
    if head:
        args[r'heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args[r'bundlecaps'] = None
    bundle = repo.getbundle('debug', **args)

    bundletype = opts.get('type', 'bzip2').lower()
    btypes = {'none': 'HG10UN',
              'bzip2': 'HG10BZ',
              'gzip': 'HG10GZ',
              'bundle2': 'HG20'}
    bundletype = btypes.get(bundletype)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_('unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)

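# Illustrative invocation (hypothetical URL; node ids must be full 40-digit
# hex strings, abbreviated here for readability):
#   hg debuggetbundle http://example.com/repo bundle.hg \
#       -H <head-node-id> -C <common-node-id> -t bundle2
# The bundle negotiated with the peer is written to bundle.hg, here in the
# HG20 (bundle2) container format.
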
@command('debugignore', [], '[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument, display the combined ignore pattern.

    Given space-separated file names, show whether each file is ignored and,
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write("%s\n" % repr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != '.':
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    for p in util.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_("%s is ignored\n") % m.uipath(f))
                else:
                    ui.write(_("%s is ignored because of "
                               "containing folder %s\n")
                             % (m.uipath(f), ignored))
                ignorefile, lineno, line = ignoredata
                ui.write(_("(ignore rule in %s, line %d: '%s')\n")
                         % (ignorefile, lineno, line))
            else:
                ui.write(_("%s is not ignored\n") % m.uipath(f))

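# Illustrative session (hypothetical .hgignore containing '*.orig' on line 2;
# the output strings match the ui.write formats above):
#   $ hg debugignore foo.orig
#   foo.orig is ignored
#   (ignore rule in .hgignore, line 2: '*.orig')
#   $ hg debugignore README
#   README is not ignored
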
@command('debugindex', cmdutil.debugrevlogopts +
    [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
    _('[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True)
def debugindex(ui, repo, file_=None, **opts):
    """dump the contents of an index file"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise error.Abort(_("unknown format %d") % format)

    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    if generaldelta:
        basehdr = ' delta'
    else:
        basehdr = ' base'

    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    if format == 0:
        ui.write((" rev offset length " + basehdr + " linkrev"
                  " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
    elif format == 1:
        ui.write((" rev flag offset length"
                  " size " + basehdr + " link p1 p2"
                  " %s\n") % "nodeid".rjust(idlen))

    for i in r:
        node = r.node(i)
        if generaldelta:
            base = r.deltaparent(i)
        else:
            base = r.chainbase(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                pp = [nullid, nullid]
            ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                i, r.start(i), r.length(i), base, r.linkrev(i),
                shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
                i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                base, r.linkrev(i), pr[0], pr[1], shortfn(node)))

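# Illustrative: `hg debugindex -m` dumps the manifest revlog index using the
# format-0 columns written above (rev, offset, length, delta/base, linkrev,
# nodeid, p1, p2), while `hg debugindex -f 1 -m` switches to the wider
# format-1 layout that also shows flags and raw size.
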
@command('debugindexdot', cmdutil.debugrevlogopts,
    _('-c|-m|FILE'), optionalrepo=True)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
    ui.write(("digraph G {\n"))
    for i in r:
        node = r.node(i)
        pp = r.parents(node)
        ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
        if pp[1] != nullid:
            ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
    ui.write("}\n")

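# Illustrative pipeline (assumes Graphviz is installed; the file names are
# hypothetical): render the revision DAG emitted above as an image.
#   hg debugindexdot -c > changelog.dot
#   dot -Tpng changelog.dot -o changelog.png
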
@command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
def debuginstall(ui, **opts):
    '''test Mercurial installation

    Returns 0 on success.
    '''
    opts = pycompat.byteskwargs(opts)

    def writetemp(contents):
        (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, pycompat.sysstr("wb"))
        f.write(contents)
        f.close()
        return name

    problems = 0

    fm = ui.formatter('debuginstall', opts)
    fm.startitem()

    # encoding
    fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = util.forcebytestr(inst)
        problems += 1
    fm.condwrite(err, 'encodingerror', _(" %s\n"
                 " (check that your locale is properly set)\n"), err)

    # Python
    fm.write('pythonexe', _("checking Python executable (%s)\n"),
             pycompat.sysexecutable)
    fm.write('pythonver', _("checking Python version (%s)\n"),
             ("%d.%d.%d" % sys.version_info[:3]))
    fm.write('pythonlib', _("checking Python lib (%s)...\n"),
             os.path.dirname(pycompat.fsencode(os.__file__)))

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add('sni')

    fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
             fm.formatlist(sorted(security), name='protocol',
                           fmt='%s', sep=','))

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if 'tls1.2' not in security:
        fm.plain(_(' TLS 1.2 not supported by Python install; '
                   'network connections lack modern security\n'))
    if 'sni' not in security:
        fm.plain(_(' SNI not supported by Python install; may have '
                   'connectivity issues with some servers\n'))

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write('hgver', _("checking Mercurial version (%s)\n"),
             hgver.split('+')[0])
    fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
             '+'.join(hgver.split('+')[1:]))

    # compiled modules
    fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
             policy.policy)
    fm.write('hgmodules', _("checking installed modules (%s)...\n"),
             os.path.dirname(pycompat.fsencode(__file__)))

    if policy.policy in ('c', 'allow'):
        err = None
        try:
            from .cext import (
                base85,
                bdiff,
                mpatch,
                osutil,
            )
            dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
        except Exception as inst:
            err = util.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, 'extensionserror', " %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write('compengines', _('checking registered compression engines (%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines),
                           name='compengine', fmt='%s', sep=', '))
    fm.write('compenginesavail', _('checking available compression engines '
                                   '(%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines
                                  if e.available()),
                           name='compengine', fmt='%s', sep=', '))
    wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
    fm.write('compenginesserver', _('checking available compression engines '
                                    'for wire protocol (%s)\n'),
             fm.formatlist([e.name() for e in wirecompengines
                            if e.wireprotosupport()],
                           name='compengine', fmt='%s', sep=', '))

    # templates
    p = templater.templatepaths()
    fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
    fm.condwrite(not p, '', _(" no template directories found\n"))
    if p:
        m = templater.templatepath("map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = util.forcebytestr(inst)
                p = None
            fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
        else:
            p = None
        fm.condwrite(p, 'defaulttemplate',
                     _("checking default template (%s)\n"), m)
        fm.condwrite(not m, 'defaulttemplatenotfound',
                     _(" template '%s' not found\n"), "default")
    if not p:
        problems += 1
    fm.condwrite(not p, '',
                 _(" (templates seem to have been installed incorrectly)\n"))

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    fm.write('editor', _("checking commit editor... (%s)\n"), editor)
    cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
    fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
                 _(" No commit editor set and can't find %s in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor == 'vi' and editor)
    fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
                 _(" Can't find editor '%s' in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor)
    if not cmdpath and editor != 'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = util.forcebytestr(e)
        problems += 1

    fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
    fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
                 " (specify a username in your configuration file)\n"), err)

    fm.condwrite(not problems, '',
                 _("no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(problems, 'problems',
                 _("%d problems detected,"
                   " please check your install!\n"), problems)
    fm.end()

    return problems

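# Illustrative: the formatter options accepted above allow machine-readable
# output, e.g.
#   hg debuginstall -Tjson
# which emits the fields written via fm.write() ('encoding', 'pythonver',
# 'hgver', 'problems', ...) as JSON instead of the human-readable report.
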
@command('debugknown', [], _('REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('known'):
        raise error.Abort("known() not supported by target repository")
    flags = repo.known([bin(s) for s in ids])
    ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))

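# Illustrative (hypothetical URL, ids abbreviated): querying two nodes might
# print "10", meaning the first id is known to the peer and the second is not:
#   hg debugknown http://example.com/repo <node-id-1> <node-id-2>
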
@command('debuglabelcomplete', [], _('LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    debugnamecomplete(ui, repo, *args)

@command('debuglocks',
    [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
     ('W', 'force-wlock', None,
      _('free the working state lock (DANGEROUS)'))],
    _('[OPTION]...'))
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so they should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Returns 0 if no locks are held.

    """

    if opts.get(r'force_lock'):
        repo.svfs.unlink('lock')
    if opts.get(r'force_wlock'):
        repo.vfs.unlink('wlock')
    if opts.get(r'force_lock') or opts.get(r'force_wlock'):
        return 0

    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            l.release()
        else:
            try:
                stat = vfs.lstat(name)
                age = now - stat.st_mtime
                user = util.username(stat.st_uid)
                locker = vfs.readlock(name)
                if ":" in locker:
                    host, pid = locker.split(':')
                    if host == socket.gethostname():
                        locker = 'user %s, process %s' % (user, pid)
                    else:
                        locker = 'user %s, process %s, host %s' \
                                 % (user, pid, host)
                ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
                return 1
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise

        ui.write(("%-6s free\n") % (name + ":"))
        return 0

    held += report(repo.svfs, "lock", repo.lock)
    held += report(repo.vfs, "wlock", repo.wlock)

    return held

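# Illustrative output when the store lock is held by a local process and the
# working-copy lock is free (formats match the ui.write calls above; user,
# pid and age are hypothetical):
#   lock:  user alice, process 12345 (17s)
#   wlock: free
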
@command('debugmergestate', [], '')
def debugmergestate(ui, repo, *args):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""
    def _hashornull(h):
        if h == nullhex:
            return 'null'
        else:
            return h

    def printrecords(version):
        ui.write(('* version %s records\n') % version)
        if version == 1:
            records = v1records
        else:
            records = v2records

        for rtype, record in records:
            # pretty print some record types
            if rtype == 'L':
                ui.write(('local: %s\n') % record)
            elif rtype == 'O':
                ui.write(('other: %s\n') % record)
            elif rtype == 'm':
                driver, mdstate = record.split('\0', 1)
                ui.write(('merge driver: %s (state "%s")\n')
                         % (driver, mdstate))
            elif rtype in 'FDC':
                r = record.split('\0')
                f, state, hash, lfile, afile, anode, ofile = r[0:7]
                if version == 1:
                    onode = 'not stored in v1 format'
                    flags = r[7]
                else:
                    onode, flags = r[7:9]
                ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
                         % (f, rtype, state, _hashornull(hash)))
                ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
                ui.write((' ancestor path: %s (node %s)\n')
                         % (afile, _hashornull(anode)))
                ui.write((' other path: %s (node %s)\n')
                         % (ofile, _hashornull(onode)))
            elif rtype == 'f':
                filename, rawextras = record.split('\0', 1)
                extras = rawextras.split('\0')
                i = 0
                extrastrings = []
                while i < len(extras):
                    extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
                    i += 2

                ui.write(('file extras: %s (%s)\n')
                         % (filename, ', '.join(extrastrings)))
            elif rtype == 'l':
                labels = record.split('\0', 2)
                labels = [l for l in labels if len(l) > 0]
                ui.write(('labels:\n'))
                ui.write((' local: %s\n' % labels[0]))
                ui.write((' other: %s\n' % labels[1]))
                if len(labels) > 2:
                    ui.write((' base: %s\n' % labels[2]))
            else:
                ui.write(('unrecognized entry: %s\t%s\n')
                         % (rtype, record.replace('\0', '\t')))

    # Avoid mergestate.read() since it may raise an exception for unsupported
    # merge state records. We shouldn't be doing this, but this is OK since this
    # command is pretty low-level.
    ms = mergemod.mergestate(repo)

    # sort so that reasonable information is on top
    v1records = ms._readrecordsv1()
    v2records = ms._readrecordsv2()
    order = 'LOml'
    def key(r):
        idx = order.find(r[0])
        if idx == -1:
            return (1, r[1])
        else:
            return (0, idx)
    v1records.sort(key=key)
    v2records.sort(key=key)

    if not v1records and not v2records:
        ui.write(('no merge state found\n'))
    elif not v2records:
        ui.note(('no version 2 merge state\n'))
        printrecords(1)
    elif ms._v1v2match(v1records, v2records):
        ui.note(('v1 and v2 states match: using v2\n'))
        printrecords(2)
    else:
        ui.note(('v1 and v2 states mismatch: using v1\n'))
        printrecords(1)
        if ui.verbose:
            printrecords(2)

@command('debugnamecomplete', [], _('NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    names = set()
    # since we previously only listed open branches, we will handle that
    # specially (after this for loop)
    for name, ns in repo.names.iteritems():
        if name != 'branches':
            names.update(ns.listnames(repo))
    names.update(tag for (tag, heads, tip, closed)
                 in repo.branchmap().iterbranches() if not closed)
    completions = set()
    if not args:
        args = ['']
    for a in args:
        completions.update(n for n in names if n.startswith(a))
    ui.write('\n'.join(sorted(completions)))
    ui.write('\n')

@command('debugobsolete',
    [('', 'flags', 0, _('markers flag')),
     ('', 'record-parents', False,
      _('record parent information for the precursor')),
     ('r', 'rev', [], _('display markers relevant to REV')),
     ('', 'exclusive', False, _('restrict display to markers only '
                                'relevant to REV')),
     ('', 'index', False, _('display index of the marker')),
     ('', 'delete', [], _('delete markers specified by indices')),
    ] + cmdutil.commitopts2 + cmdutil.formatteropts,
    _('[OBSOLETED [REPLACEMENT ...]]'))
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.Abort('changeset references must be full hexadecimal '
                              'node identifiers')

    if opts.get('delete'):
        indices = []
        for v in opts.get('delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.Abort(_('invalid index value: %r') % v,
                                  hint=_('use integers for indices'))

        if repo.currenttransaction():
            raise error.Abort(_('cannot delete obsmarkers in the middle '
                                'of transaction.'))

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_('deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        if opts['rev']:
            raise error.Abort('cannot select revision when creating marker')
        metadata = {}
        metadata['user'] = opts['user'] or ui.username()
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction('debugobsolete')
            try:
                date = opts.get('date')
                if date:
                    date = util.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts['record_parents']:
                    if prec not in repo.unfiltered():
                        raise error.Abort('cannot use --record-parents on '
                                          'unknown changesets')
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(tr, prec, succs, opts['flags'],
                                     parents=parents, date=date,
                                     metadata=metadata, ui=ui)
                tr.close()
            except ValueError as exc:
                raise error.Abort(_('bad obsmarker input: %s') % exc)
            finally:
                tr.release()
        finally:
            l.release()
    else:
        if opts['rev']:
            revs = scmutil.revrange(repo, opts['rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(obsutil.getmarkers(repo, nodes=nodes,
                                              exclusive=opts['exclusive']))
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get('rev') and opts.get('index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter('debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get('index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()

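# Illustrative invocations (node ids must be full 40-digit hex values,
# abbreviated here): record that one changeset was rewritten into another,
# then list the markers relevant to the working-copy parent with their index:
#   hg debugobsolete <precursor-id> <successor-id> -d '0 0'
#   hg debugobsolete --rev . --index
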
@command('debugpathcomplete',
    [('f', 'full', None, _('complete an entire path')),
     ('n', 'normal', None, _('show only normal files')),
     ('a', 'added', None, _('show only added files')),
     ('r', 'removed', None, _('show only removed files'))],
    _('FILESPEC...'))
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += '/'
        spec = spec[len(rootdir):]
        fixpaths = pycompat.ossep != '/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, '/')
        speclen = len(spec)
        fullpaths = opts[r'full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.iteritems():
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace('/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    acceptable = ''
    if opts[r'normal']:
        acceptable += 'nm'
    if opts[r'added']:
        acceptable += 'a'
    if opts[r'removed']:
        acceptable += 'r'
    cwd = repo.getcwd()
    if not specs:
        specs = ['.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or 'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write('\n')

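# Illustrative (the tracked path 'mercurial/debugcommands.py' is hypothetical):
#   hg debugpathcomplete mercurial/de
# completes to the next path segment only, while
#   hg debugpathcomplete -f mercurial/de
# prints full matching paths, as controlled by the --full flag above.
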
@command('debugpickmergetool',
    [('r', 'rev', '', _('check for files in this revision'), _('REV')),
     ('', 'changedelete', None, _('emulate merging change and delete')),
    ] + cmdutil.walkopts + cmdutil.mergetoolopts,
    _('[PATTERN]...'),
    inferrepo=True)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for the specified file

    As described in :hg:`help merge-tools`, Mercurial examines the
    configurations below in this order to decide which merge tool is
    chosen for the specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out the examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actually updating to it.

    With --debug, this command also shows warning messages while matching
    against ``merge-patterns`` and so on. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases the amount of output per file according
    to the configurations in hgrc.

    With -v/--verbose, this command shows the configurations below
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If a merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such a case, the information above
    is useful for understanding why a merge tool was chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts['tool']:
        overrides[('ui', 'forcemerge')] = opts['tool']
        ui.note(('with --tool %r\n') % (opts['tool']))

    with ui.configoverride(overrides, 'debugmergepatterns'):
        hgmerge = encoding.environ.get("HGMERGE")
        if hgmerge is not None:
            ui.note(('with HGMERGE=%r\n') % (hgmerge))
        uimerge = ui.config("ui", "merge")
        if uimerge:
            ui.note(('with ui.merge=%r\n') % (uimerge))

        ctx = scmutil.revsingle(repo, opts.get('rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts['changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                if not ui.debugflag:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(repo, ui, path,
                                                     fctx.isbinary(),
                                                     'l' in fctx.flags(),
                                                     changedelete)
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(('%s = %s\n') % (path, tool))

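# Illustrative (pattern and tool name are hypothetical): show which merge tool
# would be picked for C files at the working-copy parent, in the
# 'FILE = MERGETOOL' style documented above, without performing a merge:
#   hg debugpickmergetool -r . --tool kdiff3 'glob:**.c'
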
@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    target = hg.peer(ui, {}, repopath)
    if keyinfo:
        key, old, new = keyinfo
        r = target.pushkey(namespace, key, old, new)
        ui.status(str(r) + '\n')
        return not r
    else:
        for k, v in sorted(target.listkeys(namespace).iteritems()):
            ui.write("%s\t%s\n" % (util.escapestr(k),
                                   util.escapestr(v)))

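# Illustrative (hypothetical URL and bookmark name): list a namespace, or move
# a key if it still has the expected old value; for the 'bookmarks' namespace
# the old/new values are hex node ids:
#   hg debugpushkey http://example.com/repo bookmarks
#   hg debugpushkey http://example.com/repo bookmarks mybook <old-id> <new-id>
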
@command('debugpvec', [], _('A B'))
def debugpvec(ui, repo, a, b=None):
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = "="
    elif pa > pb:
        rel = ">"
    elif pa < pb:
        rel = "<"
    elif pa | pb:
        rel = "|"
    ui.write(_("a: %s\n") % pa)
    ui.write(_("b: %s\n") % pb)
    ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
             (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
              pa.distance(pb), rel))

1666 @command('debugrebuilddirstate|debugrebuildstate',
1702 @command('debugrebuilddirstate|debugrebuildstate',
1667 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1703 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1668 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1704 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1669 'the working copy parent')),
1705 'the working copy parent')),
1670 ],
1706 ],
1671 _('[-r REV]'))
1707 _('[-r REV]'))
1672 def debugrebuilddirstate(ui, repo, rev, **opts):
1708 def debugrebuilddirstate(ui, repo, rev, **opts):
1673 """rebuild the dirstate as it would look like for the given revision
1709 """rebuild the dirstate as it would look like for the given revision
1674
1710
1675 If no revision is specified the first current parent will be used.
1711 If no revision is specified the first current parent will be used.
1676
1712
1677 The dirstate will be set to the files of the given revision.
1713 The dirstate will be set to the files of the given revision.
1678 The actual working directory content or existing dirstate
1714 The actual working directory content or existing dirstate
1679 information such as adds or removes is not considered.
1715 information such as adds or removes is not considered.
1680
1716
1681 ``minimal`` will only rebuild the dirstate status for files that claim to be
1717 ``minimal`` will only rebuild the dirstate status for files that claim to be
1682 tracked but are not in the parent manifest, or that exist in the parent
1718 tracked but are not in the parent manifest, or that exist in the parent
1683 manifest but are not in the dirstate. It will not change adds, removes, or
1719 manifest but are not in the dirstate. It will not change adds, removes, or
1684 modified files that are in the working copy parent.
1720 modified files that are in the working copy parent.
1685
1721
1686 One use of this command is to make the next :hg:`status` invocation
1722 One use of this command is to make the next :hg:`status` invocation
1687 check the actual file content.
1723 check the actual file content.
1688 """
1724 """
1689 ctx = scmutil.revsingle(repo, rev)
1725 ctx = scmutil.revsingle(repo, rev)
1690 with repo.wlock():
1726 with repo.wlock():
1691 dirstate = repo.dirstate
1727 dirstate = repo.dirstate
1692 changedfiles = None
1728 changedfiles = None
1693 # See command doc for what minimal does.
1729 # See command doc for what minimal does.
1694 if opts.get(r'minimal'):
1730 if opts.get(r'minimal'):
1695 manifestfiles = set(ctx.manifest().keys())
1731 manifestfiles = set(ctx.manifest().keys())
1696 dirstatefiles = set(dirstate)
1732 dirstatefiles = set(dirstate)
1697 manifestonly = manifestfiles - dirstatefiles
1733 manifestonly = manifestfiles - dirstatefiles
1698 dsonly = dirstatefiles - manifestfiles
1734 dsonly = dirstatefiles - manifestfiles
1699 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1735 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1700 changedfiles = manifestonly | dsnotadded
1736 changedfiles = manifestonly | dsnotadded
1701
1737
1702 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1738 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
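# Illustrative usage sketch (REV is a placeholder): rebuild the dirstate from
# the first parent of the working directory, touching only inconsistent
# entries, or reset it to an explicit revision:
#   hg debugrebuilddirstate --minimal
#   hg debugrebuilddirstate -r REV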
1703
1739
1704 @command('debugrebuildfncache', [], '')
1740 @command('debugrebuildfncache', [], '')
1705 def debugrebuildfncache(ui, repo):
1741 def debugrebuildfncache(ui, repo):
1706 """rebuild the fncache file"""
1742 """rebuild the fncache file"""
1707 repair.rebuildfncache(ui, repo)
1743 repair.rebuildfncache(ui, repo)
1708
1744
1709 @command('debugrename',
1745 @command('debugrename',
1710 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1746 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1711 _('[-r REV] FILE'))
1747 _('[-r REV] FILE'))
1712 def debugrename(ui, repo, file1, *pats, **opts):
1748 def debugrename(ui, repo, file1, *pats, **opts):
1713 """dump rename information"""
1749 """dump rename information"""
1714
1750
1715 opts = pycompat.byteskwargs(opts)
1751 opts = pycompat.byteskwargs(opts)
1716 ctx = scmutil.revsingle(repo, opts.get('rev'))
1752 ctx = scmutil.revsingle(repo, opts.get('rev'))
1717 m = scmutil.match(ctx, (file1,) + pats, opts)
1753 m = scmutil.match(ctx, (file1,) + pats, opts)
1718 for abs in ctx.walk(m):
1754 for abs in ctx.walk(m):
1719 fctx = ctx[abs]
1755 fctx = ctx[abs]
1720 o = fctx.filelog().renamed(fctx.filenode())
1756 o = fctx.filelog().renamed(fctx.filenode())
1721 rel = m.rel(abs)
1757 rel = m.rel(abs)
1722 if o:
1758 if o:
1723 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1759 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1724 else:
1760 else:
1725 ui.write(_("%s not renamed\n") % rel)
1761 ui.write(_("%s not renamed\n") % rel)
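# Illustrative usage sketch (REV and FILE are placeholders): print the rename
# or copy source recorded in FILE's filelog at the given revision:
#   hg debugrename -r REV FILE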
1726
1762
1727 @command('debugrevlog', cmdutil.debugrevlogopts +
1763 @command('debugrevlog', cmdutil.debugrevlogopts +
1728 [('d', 'dump', False, _('dump index data'))],
1764 [('d', 'dump', False, _('dump index data'))],
1729 _('-c|-m|FILE'),
1765 _('-c|-m|FILE'),
1730 optionalrepo=True)
1766 optionalrepo=True)
1731 def debugrevlog(ui, repo, file_=None, **opts):
1767 def debugrevlog(ui, repo, file_=None, **opts):
1732 """show data and statistics about a revlog"""
1768 """show data and statistics about a revlog"""
1733 opts = pycompat.byteskwargs(opts)
1769 opts = pycompat.byteskwargs(opts)
1734 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1770 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1735
1771
1736 if opts.get("dump"):
1772 if opts.get("dump"):
1737 numrevs = len(r)
1773 numrevs = len(r)
1738 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1774 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1739 " rawsize totalsize compression heads chainlen\n"))
1775 " rawsize totalsize compression heads chainlen\n"))
1740 ts = 0
1776 ts = 0
1741 heads = set()
1777 heads = set()
1742
1778
1743 for rev in xrange(numrevs):
1779 for rev in xrange(numrevs):
1744 dbase = r.deltaparent(rev)
1780 dbase = r.deltaparent(rev)
1745 if dbase == -1:
1781 if dbase == -1:
1746 dbase = rev
1782 dbase = rev
1747 cbase = r.chainbase(rev)
1783 cbase = r.chainbase(rev)
1748 clen = r.chainlen(rev)
1784 clen = r.chainlen(rev)
1749 p1, p2 = r.parentrevs(rev)
1785 p1, p2 = r.parentrevs(rev)
1750 rs = r.rawsize(rev)
1786 rs = r.rawsize(rev)
1751 ts = ts + rs
1787 ts = ts + rs
1752 heads -= set(r.parentrevs(rev))
1788 heads -= set(r.parentrevs(rev))
1753 heads.add(rev)
1789 heads.add(rev)
1754 try:
1790 try:
1755 compression = ts / r.end(rev)
1791 compression = ts / r.end(rev)
1756 except ZeroDivisionError:
1792 except ZeroDivisionError:
1757 compression = 0
1793 compression = 0
1758 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1794 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1759 "%11d %5d %8d\n" %
1795 "%11d %5d %8d\n" %
1760 (rev, p1, p2, r.start(rev), r.end(rev),
1796 (rev, p1, p2, r.start(rev), r.end(rev),
1761 r.start(dbase), r.start(cbase),
1797 r.start(dbase), r.start(cbase),
1762 r.start(p1), r.start(p2),
1798 r.start(p1), r.start(p2),
1763 rs, ts, compression, len(heads), clen))
1799 rs, ts, compression, len(heads), clen))
1764 return 0
1800 return 0
1765
1801
1766 v = r.version
1802 v = r.version
1767 format = v & 0xFFFF
1803 format = v & 0xFFFF
1768 flags = []
1804 flags = []
1769 gdelta = False
1805 gdelta = False
1770 if v & revlog.FLAG_INLINE_DATA:
1806 if v & revlog.FLAG_INLINE_DATA:
1771 flags.append('inline')
1807 flags.append('inline')
1772 if v & revlog.FLAG_GENERALDELTA:
1808 if v & revlog.FLAG_GENERALDELTA:
1773 gdelta = True
1809 gdelta = True
1774 flags.append('generaldelta')
1810 flags.append('generaldelta')
1775 if not flags:
1811 if not flags:
1776 flags = ['(none)']
1812 flags = ['(none)']
1777
1813
1778 nummerges = 0
1814 nummerges = 0
1779 numfull = 0
1815 numfull = 0
1780 numprev = 0
1816 numprev = 0
1781 nump1 = 0
1817 nump1 = 0
1782 nump2 = 0
1818 nump2 = 0
1783 numother = 0
1819 numother = 0
1784 nump1prev = 0
1820 nump1prev = 0
1785 nump2prev = 0
1821 nump2prev = 0
1786 chainlengths = []
1822 chainlengths = []
1787 chainbases = []
1823 chainbases = []
1788 chainspans = []
1824 chainspans = []
1789
1825
1790 datasize = [None, 0, 0]
1826 datasize = [None, 0, 0]
1791 fullsize = [None, 0, 0]
1827 fullsize = [None, 0, 0]
1792 deltasize = [None, 0, 0]
1828 deltasize = [None, 0, 0]
1793 chunktypecounts = {}
1829 chunktypecounts = {}
1794 chunktypesizes = {}
1830 chunktypesizes = {}
1795
1831
1796 def addsize(size, l):
1832 def addsize(size, l):
1797 if l[0] is None or size < l[0]:
1833 if l[0] is None or size < l[0]:
1798 l[0] = size
1834 l[0] = size
1799 if size > l[1]:
1835 if size > l[1]:
1800 l[1] = size
1836 l[1] = size
1801 l[2] += size
1837 l[2] += size
1802
1838
1803 numrevs = len(r)
1839 numrevs = len(r)
1804 for rev in xrange(numrevs):
1840 for rev in xrange(numrevs):
1805 p1, p2 = r.parentrevs(rev)
1841 p1, p2 = r.parentrevs(rev)
1806 delta = r.deltaparent(rev)
1842 delta = r.deltaparent(rev)
1807 if format > 0:
1843 if format > 0:
1808 addsize(r.rawsize(rev), datasize)
1844 addsize(r.rawsize(rev), datasize)
1809 if p2 != nullrev:
1845 if p2 != nullrev:
1810 nummerges += 1
1846 nummerges += 1
1811 size = r.length(rev)
1847 size = r.length(rev)
1812 if delta == nullrev:
1848 if delta == nullrev:
1813 chainlengths.append(0)
1849 chainlengths.append(0)
1814 chainbases.append(r.start(rev))
1850 chainbases.append(r.start(rev))
1815 chainspans.append(size)
1851 chainspans.append(size)
1816 numfull += 1
1852 numfull += 1
1817 addsize(size, fullsize)
1853 addsize(size, fullsize)
1818 else:
1854 else:
1819 chainlengths.append(chainlengths[delta] + 1)
1855 chainlengths.append(chainlengths[delta] + 1)
1820 baseaddr = chainbases[delta]
1856 baseaddr = chainbases[delta]
1821 revaddr = r.start(rev)
1857 revaddr = r.start(rev)
1822 chainbases.append(baseaddr)
1858 chainbases.append(baseaddr)
1823 chainspans.append((revaddr - baseaddr) + size)
1859 chainspans.append((revaddr - baseaddr) + size)
1824 addsize(size, deltasize)
1860 addsize(size, deltasize)
1825 if delta == rev - 1:
1861 if delta == rev - 1:
1826 numprev += 1
1862 numprev += 1
1827 if delta == p1:
1863 if delta == p1:
1828 nump1prev += 1
1864 nump1prev += 1
1829 elif delta == p2:
1865 elif delta == p2:
1830 nump2prev += 1
1866 nump2prev += 1
1831 elif delta == p1:
1867 elif delta == p1:
1832 nump1 += 1
1868 nump1 += 1
1833 elif delta == p2:
1869 elif delta == p2:
1834 nump2 += 1
1870 nump2 += 1
1835 elif delta != nullrev:
1871 elif delta != nullrev:
1836 numother += 1
1872 numother += 1
1837
1873
1838 # Obtain data on the raw chunks in the revlog.
1874 # Obtain data on the raw chunks in the revlog.
1839 segment = r._getsegmentforrevs(rev, rev)[1]
1875 segment = r._getsegmentforrevs(rev, rev)[1]
1840 if segment:
1876 if segment:
1841 chunktype = bytes(segment[0:1])
1877 chunktype = bytes(segment[0:1])
1842 else:
1878 else:
1843 chunktype = 'empty'
1879 chunktype = 'empty'
1844
1880
1845 if chunktype not in chunktypecounts:
1881 if chunktype not in chunktypecounts:
1846 chunktypecounts[chunktype] = 0
1882 chunktypecounts[chunktype] = 0
1847 chunktypesizes[chunktype] = 0
1883 chunktypesizes[chunktype] = 0
1848
1884
1849 chunktypecounts[chunktype] += 1
1885 chunktypecounts[chunktype] += 1
1850 chunktypesizes[chunktype] += size
1886 chunktypesizes[chunktype] += size
1851
1887
1852 # Adjust size min value for empty cases
1888 # Adjust size min value for empty cases
1853 for size in (datasize, fullsize, deltasize):
1889 for size in (datasize, fullsize, deltasize):
1854 if size[0] is None:
1890 if size[0] is None:
1855 size[0] = 0
1891 size[0] = 0
1856
1892
1857 numdeltas = numrevs - numfull
1893 numdeltas = numrevs - numfull
1858 numoprev = numprev - nump1prev - nump2prev
1894 numoprev = numprev - nump1prev - nump2prev
1859 totalrawsize = datasize[2]
1895 totalrawsize = datasize[2]
1860 datasize[2] /= numrevs
1896 datasize[2] /= numrevs
1861 fulltotal = fullsize[2]
1897 fulltotal = fullsize[2]
1862 fullsize[2] /= numfull
1898 fullsize[2] /= numfull
1863 deltatotal = deltasize[2]
1899 deltatotal = deltasize[2]
1864 if numrevs - numfull > 0:
1900 if numrevs - numfull > 0:
1865 deltasize[2] /= numrevs - numfull
1901 deltasize[2] /= numrevs - numfull
1866 totalsize = fulltotal + deltatotal
1902 totalsize = fulltotal + deltatotal
1867 avgchainlen = sum(chainlengths) / numrevs
1903 avgchainlen = sum(chainlengths) / numrevs
1868 maxchainlen = max(chainlengths)
1904 maxchainlen = max(chainlengths)
1869 maxchainspan = max(chainspans)
1905 maxchainspan = max(chainspans)
1870 compratio = 1
1906 compratio = 1
1871 if totalsize:
1907 if totalsize:
1872 compratio = totalrawsize / totalsize
1908 compratio = totalrawsize / totalsize
1873
1909
1874 basedfmtstr = '%%%dd\n'
1910 basedfmtstr = '%%%dd\n'
1875 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
1911 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
1876
1912
1877 def dfmtstr(max):
1913 def dfmtstr(max):
1878 return basedfmtstr % len(str(max))
1914 return basedfmtstr % len(str(max))
1879 def pcfmtstr(max, padding=0):
1915 def pcfmtstr(max, padding=0):
1880 return basepcfmtstr % (len(str(max)), ' ' * padding)
1916 return basepcfmtstr % (len(str(max)), ' ' * padding)
1881
1917
1882 def pcfmt(value, total):
1918 def pcfmt(value, total):
1883 if total:
1919 if total:
1884 return (value, 100 * float(value) / total)
1920 return (value, 100 * float(value) / total)
1885 else:
1921 else:
1886 return value, 100.0
1922 return value, 100.0
1887
1923
1888 ui.write(('format : %d\n') % format)
1924 ui.write(('format : %d\n') % format)
1889 ui.write(('flags : %s\n') % ', '.join(flags))
1925 ui.write(('flags : %s\n') % ', '.join(flags))
1890
1926
1891 ui.write('\n')
1927 ui.write('\n')
1892 fmt = pcfmtstr(totalsize)
1928 fmt = pcfmtstr(totalsize)
1893 fmt2 = dfmtstr(totalsize)
1929 fmt2 = dfmtstr(totalsize)
1894 ui.write(('revisions : ') + fmt2 % numrevs)
1930 ui.write(('revisions : ') + fmt2 % numrevs)
1895 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
1931 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
1896 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
1932 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
1897 ui.write(('revisions : ') + fmt2 % numrevs)
1933 ui.write(('revisions : ') + fmt2 % numrevs)
1898 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
1934 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
1899 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
1935 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
1900 ui.write(('revision size : ') + fmt2 % totalsize)
1936 ui.write(('revision size : ') + fmt2 % totalsize)
1901 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
1937 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
1902 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
1938 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
1903
1939
1904 def fmtchunktype(chunktype):
1940 def fmtchunktype(chunktype):
1905 if chunktype == 'empty':
1941 if chunktype == 'empty':
1906 return ' %s : ' % chunktype
1942 return ' %s : ' % chunktype
1907 elif chunktype in pycompat.bytestr(string.ascii_letters):
1943 elif chunktype in pycompat.bytestr(string.ascii_letters):
1908 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
1944 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
1909 else:
1945 else:
1910 return ' 0x%s : ' % hex(chunktype)
1946 return ' 0x%s : ' % hex(chunktype)
1911
1947
1912 ui.write('\n')
1948 ui.write('\n')
1913 ui.write(('chunks : ') + fmt2 % numrevs)
1949 ui.write(('chunks : ') + fmt2 % numrevs)
1914 for chunktype in sorted(chunktypecounts):
1950 for chunktype in sorted(chunktypecounts):
1915 ui.write(fmtchunktype(chunktype))
1951 ui.write(fmtchunktype(chunktype))
1916 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
1952 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
1917 ui.write(('chunks size : ') + fmt2 % totalsize)
1953 ui.write(('chunks size : ') + fmt2 % totalsize)
1918 for chunktype in sorted(chunktypecounts):
1954 for chunktype in sorted(chunktypecounts):
1919 ui.write(fmtchunktype(chunktype))
1955 ui.write(fmtchunktype(chunktype))
1920 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
1956 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
1921
1957
1922 ui.write('\n')
1958 ui.write('\n')
1923 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
1959 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
1924 ui.write(('avg chain length : ') + fmt % avgchainlen)
1960 ui.write(('avg chain length : ') + fmt % avgchainlen)
1925 ui.write(('max chain length : ') + fmt % maxchainlen)
1961 ui.write(('max chain length : ') + fmt % maxchainlen)
1926 ui.write(('max chain reach : ') + fmt % maxchainspan)
1962 ui.write(('max chain reach : ') + fmt % maxchainspan)
1927 ui.write(('compression ratio : ') + fmt % compratio)
1963 ui.write(('compression ratio : ') + fmt % compratio)
1928
1964
1929 if format > 0:
1965 if format > 0:
1930 ui.write('\n')
1966 ui.write('\n')
1931 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
1967 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
1932 % tuple(datasize))
1968 % tuple(datasize))
1933 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
1969 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
1934 % tuple(fullsize))
1970 % tuple(fullsize))
1935 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
1971 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
1936 % tuple(deltasize))
1972 % tuple(deltasize))
1937
1973
1938 if numdeltas > 0:
1974 if numdeltas > 0:
1939 ui.write('\n')
1975 ui.write('\n')
1940 fmt = pcfmtstr(numdeltas)
1976 fmt = pcfmtstr(numdeltas)
1941 fmt2 = pcfmtstr(numdeltas, 4)
1977 fmt2 = pcfmtstr(numdeltas, 4)
1942 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
1978 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
1943 if numprev > 0:
1979 if numprev > 0:
1944 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
1980 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
1945 numprev))
1981 numprev))
1946 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
1982 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
1947 numprev))
1983 numprev))
1948 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
1984 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
1949 numprev))
1985 numprev))
1950 if gdelta:
1986 if gdelta:
1951 ui.write(('deltas against p1 : ')
1987 ui.write(('deltas against p1 : ')
1952 + fmt % pcfmt(nump1, numdeltas))
1988 + fmt % pcfmt(nump1, numdeltas))
1953 ui.write(('deltas against p2 : ')
1989 ui.write(('deltas against p2 : ')
1954 + fmt % pcfmt(nump2, numdeltas))
1990 + fmt % pcfmt(nump2, numdeltas))
1955 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
1991 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
1956 numdeltas))
1992 numdeltas))
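# Illustrative usage sketch: summarize the manifest revlog, or dump the raw
# index rows of a file's revlog with -d/--dump; both forms are exercised by
# the test file below:
#   hg debugrevlog -m
#   hg debugrevlog -d FILE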
1957
1993
1958 @command('debugrevspec',
1994 @command('debugrevspec',
1959 [('', 'optimize', None,
1995 [('', 'optimize', None,
1960 _('print parsed tree after optimizing (DEPRECATED)')),
1996 _('print parsed tree after optimizing (DEPRECATED)')),
1961 ('', 'show-revs', True, _('print list of result revisions (default)')),
1997 ('', 'show-revs', True, _('print list of result revisions (default)')),
1962 ('s', 'show-set', None, _('print internal representation of result set')),
1998 ('s', 'show-set', None, _('print internal representation of result set')),
1963 ('p', 'show-stage', [],
1999 ('p', 'show-stage', [],
1964 _('print parsed tree at the given stage'), _('NAME')),
2000 _('print parsed tree at the given stage'), _('NAME')),
1965 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2001 ('', 'no-optimized', False, _('evaluate tree without optimization')),
1966 ('', 'verify-optimized', False, _('verify optimized result')),
2002 ('', 'verify-optimized', False, _('verify optimized result')),
1967 ],
2003 ],
1968 ('REVSPEC'))
2004 ('REVSPEC'))
1969 def debugrevspec(ui, repo, expr, **opts):
2005 def debugrevspec(ui, repo, expr, **opts):
1970 """parse and apply a revision specification
2006 """parse and apply a revision specification
1971
2007
1972 Use -p/--show-stage option to print the parsed tree at the given stages.
2008 Use -p/--show-stage option to print the parsed tree at the given stages.
1973 Use -p all to print tree at every stage.
2009 Use -p all to print tree at every stage.
1974
2010
1975 Use --no-show-revs option with -s or -p to print only the set
2011 Use --no-show-revs option with -s or -p to print only the set
1976 representation or the parsed tree respectively.
2012 representation or the parsed tree respectively.
1977
2013
1978 Use --verify-optimized to compare the optimized result with the unoptimized
2014 Use --verify-optimized to compare the optimized result with the unoptimized
1979 one. Returns 1 if the optimized result differs.
2015 one. Returns 1 if the optimized result differs.
1980 """
2016 """
1981 opts = pycompat.byteskwargs(opts)
2017 opts = pycompat.byteskwargs(opts)
1982 aliases = ui.configitems('revsetalias')
2018 aliases = ui.configitems('revsetalias')
1983 stages = [
2019 stages = [
1984 ('parsed', lambda tree: tree),
2020 ('parsed', lambda tree: tree),
1985 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2021 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
1986 ui.warn)),
2022 ui.warn)),
1987 ('concatenated', revsetlang.foldconcat),
2023 ('concatenated', revsetlang.foldconcat),
1988 ('analyzed', revsetlang.analyze),
2024 ('analyzed', revsetlang.analyze),
1989 ('optimized', revsetlang.optimize),
2025 ('optimized', revsetlang.optimize),
1990 ]
2026 ]
1991 if opts['no_optimized']:
2027 if opts['no_optimized']:
1992 stages = stages[:-1]
2028 stages = stages[:-1]
1993 if opts['verify_optimized'] and opts['no_optimized']:
2029 if opts['verify_optimized'] and opts['no_optimized']:
1994 raise error.Abort(_('cannot use --verify-optimized with '
2030 raise error.Abort(_('cannot use --verify-optimized with '
1995 '--no-optimized'))
2031 '--no-optimized'))
1996 stagenames = set(n for n, f in stages)
2032 stagenames = set(n for n, f in stages)
1997
2033
1998 showalways = set()
2034 showalways = set()
1999 showchanged = set()
2035 showchanged = set()
2000 if ui.verbose and not opts['show_stage']:
2036 if ui.verbose and not opts['show_stage']:
2001 # show parsed tree by --verbose (deprecated)
2037 # show parsed tree by --verbose (deprecated)
2002 showalways.add('parsed')
2038 showalways.add('parsed')
2003 showchanged.update(['expanded', 'concatenated'])
2039 showchanged.update(['expanded', 'concatenated'])
2004 if opts['optimize']:
2040 if opts['optimize']:
2005 showalways.add('optimized')
2041 showalways.add('optimized')
2006 if opts['show_stage'] and opts['optimize']:
2042 if opts['show_stage'] and opts['optimize']:
2007 raise error.Abort(_('cannot use --optimize with --show-stage'))
2043 raise error.Abort(_('cannot use --optimize with --show-stage'))
2008 if opts['show_stage'] == ['all']:
2044 if opts['show_stage'] == ['all']:
2009 showalways.update(stagenames)
2045 showalways.update(stagenames)
2010 else:
2046 else:
2011 for n in opts['show_stage']:
2047 for n in opts['show_stage']:
2012 if n not in stagenames:
2048 if n not in stagenames:
2013 raise error.Abort(_('invalid stage name: %s') % n)
2049 raise error.Abort(_('invalid stage name: %s') % n)
2014 showalways.update(opts['show_stage'])
2050 showalways.update(opts['show_stage'])
2015
2051
2016 treebystage = {}
2052 treebystage = {}
2017 printedtree = None
2053 printedtree = None
2018 tree = revsetlang.parse(expr, lookup=repo.__contains__)
2054 tree = revsetlang.parse(expr, lookup=repo.__contains__)
2019 for n, f in stages:
2055 for n, f in stages:
2020 treebystage[n] = tree = f(tree)
2056 treebystage[n] = tree = f(tree)
2021 if n in showalways or (n in showchanged and tree != printedtree):
2057 if n in showalways or (n in showchanged and tree != printedtree):
2022 if opts['show_stage'] or n != 'parsed':
2058 if opts['show_stage'] or n != 'parsed':
2023 ui.write(("* %s:\n") % n)
2059 ui.write(("* %s:\n") % n)
2024 ui.write(revsetlang.prettyformat(tree), "\n")
2060 ui.write(revsetlang.prettyformat(tree), "\n")
2025 printedtree = tree
2061 printedtree = tree
2026
2062
2027 if opts['verify_optimized']:
2063 if opts['verify_optimized']:
2028 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2064 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2029 brevs = revset.makematcher(treebystage['optimized'])(repo)
2065 brevs = revset.makematcher(treebystage['optimized'])(repo)
2030 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2066 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2031 ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
2067 ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
2032 ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
2068 ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
2033 arevs = list(arevs)
2069 arevs = list(arevs)
2034 brevs = list(brevs)
2070 brevs = list(brevs)
2035 if arevs == brevs:
2071 if arevs == brevs:
2036 return 0
2072 return 0
2037 ui.write(('--- analyzed\n'), label='diff.file_a')
2073 ui.write(('--- analyzed\n'), label='diff.file_a')
2038 ui.write(('+++ optimized\n'), label='diff.file_b')
2074 ui.write(('+++ optimized\n'), label='diff.file_b')
2039 sm = difflib.SequenceMatcher(None, arevs, brevs)
2075 sm = difflib.SequenceMatcher(None, arevs, brevs)
2040 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2076 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2041 if tag in ('delete', 'replace'):
2077 if tag in ('delete', 'replace'):
2042 for c in arevs[alo:ahi]:
2078 for c in arevs[alo:ahi]:
2043 ui.write('-%s\n' % c, label='diff.deleted')
2079 ui.write('-%s\n' % c, label='diff.deleted')
2044 if tag in ('insert', 'replace'):
2080 if tag in ('insert', 'replace'):
2045 for c in brevs[blo:bhi]:
2081 for c in brevs[blo:bhi]:
2046 ui.write('+%s\n' % c, label='diff.inserted')
2082 ui.write('+%s\n' % c, label='diff.inserted')
2047 if tag == 'equal':
2083 if tag == 'equal':
2048 for c in arevs[alo:ahi]:
2084 for c in arevs[alo:ahi]:
2049 ui.write(' %s\n' % c)
2085 ui.write(' %s\n' % c)
2050 return 1
2086 return 1
2051
2087
2052 func = revset.makematcher(tree)
2088 func = revset.makematcher(tree)
2053 revs = func(repo)
2089 revs = func(repo)
2054 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2090 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2055 ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
2091 ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
2056 if not opts['show_revs']:
2092 if not opts['show_revs']:
2057 return
2093 return
2058 for c in revs:
2094 for c in revs:
2059 ui.write("%s\n" % c)
2095 ui.write("%s\n" % c)
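# Illustrative usage sketch (REVSPEC is a placeholder expression): print the
# parsed and optimized trees without listing the matching revisions, or check
# that optimization does not change the result:
#   hg debugrevspec --no-show-revs -p parsed -p optimized 'REVSPEC'
#   hg debugrevspec --verify-optimized 'REVSPEC'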
2060
2096
2061 @command('debugsetparents', [], _('REV1 [REV2]'))
2097 @command('debugsetparents', [], _('REV1 [REV2]'))
2062 def debugsetparents(ui, repo, rev1, rev2=None):
2098 def debugsetparents(ui, repo, rev1, rev2=None):
2063 """manually set the parents of the current working directory
2099 """manually set the parents of the current working directory
2064
2100
2065 This is useful for writing repository conversion tools, but should
2101 This is useful for writing repository conversion tools, but should
2066 be used with care. For example, neither the working directory nor the
2102 be used with care. For example, neither the working directory nor the
2067 dirstate is updated, so file status may be incorrect after running this
2103 dirstate is updated, so file status may be incorrect after running this
2068 command.
2104 command.
2069
2105
2070 Returns 0 on success.
2106 Returns 0 on success.
2071 """
2107 """
2072
2108
2073 r1 = scmutil.revsingle(repo, rev1).node()
2109 r1 = scmutil.revsingle(repo, rev1).node()
2074 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2110 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2075
2111
2076 with repo.wlock():
2112 with repo.wlock():
2077 repo.setparents(r1, r2)
2113 repo.setparents(r1, r2)
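# Illustrative usage sketch (REV1 and REV2 are placeholders): mark two
# existing revisions as the working directory parents without touching files:
#   hg debugsetparents REV1 REV2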
2078
2114
2079 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2115 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2080 def debugssl(ui, repo, source=None, **opts):
2116 def debugssl(ui, repo, source=None, **opts):
2081 '''test a secure connection to a server
2117 '''test a secure connection to a server
2082
2118
2083 This builds the certificate chain for the server on Windows, installing the
2119 This builds the certificate chain for the server on Windows, installing the
2084 missing intermediates and trusted root via Windows Update if necessary. It
2120 missing intermediates and trusted root via Windows Update if necessary. It
2085 does nothing on other platforms.
2121 does nothing on other platforms.
2086
2122
2087 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2123 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2088 that server is used. See :hg:`help urls` for more information.
2124 that server is used. See :hg:`help urls` for more information.
2089
2125
2090 If the update succeeds, retry the original operation. Otherwise, the cause
2126 If the update succeeds, retry the original operation. Otherwise, the cause
2091 of the SSL error is likely another issue.
2127 of the SSL error is likely another issue.
2092 '''
2128 '''
2093 if not pycompat.iswindows:
2129 if not pycompat.iswindows:
2094 raise error.Abort(_('certificate chain building is only possible on '
2130 raise error.Abort(_('certificate chain building is only possible on '
2095 'Windows'))
2131 'Windows'))
2096
2132
2097 if not source:
2133 if not source:
2098 if not repo:
2134 if not repo:
2099 raise error.Abort(_("there is no Mercurial repository here, and no "
2135 raise error.Abort(_("there is no Mercurial repository here, and no "
2100 "server specified"))
2136 "server specified"))
2101 source = "default"
2137 source = "default"
2102
2138
2103 source, branches = hg.parseurl(ui.expandpath(source))
2139 source, branches = hg.parseurl(ui.expandpath(source))
2104 url = util.url(source)
2140 url = util.url(source)
2105 addr = None
2141 addr = None
2106
2142
2107 if url.scheme == 'https':
2143 if url.scheme == 'https':
2108 addr = (url.host, url.port or 443)
2144 addr = (url.host, url.port or 443)
2109 elif url.scheme == 'ssh':
2145 elif url.scheme == 'ssh':
2110 addr = (url.host, url.port or 22)
2146 addr = (url.host, url.port or 22)
2111 else:
2147 else:
2112 raise error.Abort(_("only https and ssh connections are supported"))
2148 raise error.Abort(_("only https and ssh connections are supported"))
2113
2149
2114 from . import win32
2150 from . import win32
2115
2151
2116 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2152 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2117 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2153 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2118
2154
2119 try:
2155 try:
2120 s.connect(addr)
2156 s.connect(addr)
2121 cert = s.getpeercert(True)
2157 cert = s.getpeercert(True)
2122
2158
2123 ui.status(_('checking the certificate chain for %s\n') % url.host)
2159 ui.status(_('checking the certificate chain for %s\n') % url.host)
2124
2160
2125 complete = win32.checkcertificatechain(cert, build=False)
2161 complete = win32.checkcertificatechain(cert, build=False)
2126
2162
2127 if not complete:
2163 if not complete:
2128 ui.status(_('certificate chain is incomplete, updating... '))
2164 ui.status(_('certificate chain is incomplete, updating... '))
2129
2165
2130 if not win32.checkcertificatechain(cert):
2166 if not win32.checkcertificatechain(cert):
2131 ui.status(_('failed.\n'))
2167 ui.status(_('failed.\n'))
2132 else:
2168 else:
2133 ui.status(_('done.\n'))
2169 ui.status(_('done.\n'))
2134 else:
2170 else:
2135 ui.status(_('full certificate chain is available\n'))
2171 ui.status(_('full certificate chain is available\n'))
2136 finally:
2172 finally:
2137 s.close()
2173 s.close()
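# Illustrative usage sketch (the URL is a placeholder): check the certificate
# chain of the default path or of an explicit https server (Windows only):
#   hg debugssl
#   hg debugssl https://example.com/repo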
2138
2174
2139 @command('debugsub',
2175 @command('debugsub',
2140 [('r', 'rev', '',
2176 [('r', 'rev', '',
2141 _('revision to check'), _('REV'))],
2177 _('revision to check'), _('REV'))],
2142 _('[-r REV] [REV]'))
2178 _('[-r REV] [REV]'))
2143 def debugsub(ui, repo, rev=None):
2179 def debugsub(ui, repo, rev=None):
2144 ctx = scmutil.revsingle(repo, rev, None)
2180 ctx = scmutil.revsingle(repo, rev, None)
2145 for k, v in sorted(ctx.substate.items()):
2181 for k, v in sorted(ctx.substate.items()):
2146 ui.write(('path %s\n') % k)
2182 ui.write(('path %s\n') % k)
2147 ui.write((' source %s\n') % v[0])
2183 ui.write((' source %s\n') % v[0])
2148 ui.write((' revision %s\n') % v[1])
2184 ui.write((' revision %s\n') % v[1])
2149
2185
2150 @command('debugsuccessorssets',
2186 @command('debugsuccessorssets',
2151 [('', 'closest', False, _('return closest successors sets only'))],
2187 [('', 'closest', False, _('return closest successors sets only'))],
2152 _('[REV]'))
2188 _('[REV]'))
2153 def debugsuccessorssets(ui, repo, *revs, **opts):
2189 def debugsuccessorssets(ui, repo, *revs, **opts):
2154 """show set of successors for revision
2190 """show set of successors for revision
2155
2191
2156 A successors set of changeset A is a consistent group of revisions that
2192 A successors set of changeset A is a consistent group of revisions that
2157 succeed A. It contains only non-obsolete changesets unless the closest
2193 succeed A. It contains only non-obsolete changesets unless the closest
2158 successors set option is set.
2194 successors set option is set.
2159
2195
2160 In most cases a changeset A has a single successors set containing a single
2196 In most cases a changeset A has a single successors set containing a single
2161 successor (changeset A replaced by A').
2197 successor (changeset A replaced by A').
2162
2198
2163 A changeset that is made obsolete with no successors is called "pruned".
2199 A changeset that is made obsolete with no successors is called "pruned".
2164 Such changesets have no successors sets at all.
2200 Such changesets have no successors sets at all.
2165
2201
2166 A changeset that has been "split" will have a successors set containing
2202 A changeset that has been "split" will have a successors set containing
2167 more than one successor.
2203 more than one successor.
2168
2204
2169 A changeset that has been rewritten in multiple different ways is called
2205 A changeset that has been rewritten in multiple different ways is called
2170 "divergent". Such changesets have multiple successor sets (each of which
2206 "divergent". Such changesets have multiple successor sets (each of which
2171 may also be split, i.e. have multiple successors).
2207 may also be split, i.e. have multiple successors).
2172
2208
2173 Results are displayed as follows::
2209 Results are displayed as follows::
2174
2210
2175 <rev1>
2211 <rev1>
2176 <successors-1A>
2212 <successors-1A>
2177 <rev2>
2213 <rev2>
2178 <successors-2A>
2214 <successors-2A>
2179 <successors-2B1> <successors-2B2> <successors-2B3>
2215 <successors-2B1> <successors-2B2> <successors-2B3>
2180
2216
2181 Here rev2 has two possible (i.e. divergent) successors sets. The first
2217 Here rev2 has two possible (i.e. divergent) successors sets. The first
2182 holds one element, whereas the second holds three (i.e. the changeset has
2218 holds one element, whereas the second holds three (i.e. the changeset has
2183 been split).
2219 been split).
2184 """
2220 """
2185 # passed to successorssets caching computation from one call to another
2221 # passed to successorssets caching computation from one call to another
2186 cache = {}
2222 cache = {}
2187 ctx2str = str
2223 ctx2str = str
2188 node2str = short
2224 node2str = short
2189 if ui.debug():
2225 if ui.debug():
2190 def ctx2str(ctx):
2226 def ctx2str(ctx):
2191 return ctx.hex()
2227 return ctx.hex()
2192 node2str = hex
2228 node2str = hex
2193 for rev in scmutil.revrange(repo, revs):
2229 for rev in scmutil.revrange(repo, revs):
2194 ctx = repo[rev]
2230 ctx = repo[rev]
2195 ui.write('%s\n'% ctx2str(ctx))
2231 ui.write('%s\n'% ctx2str(ctx))
2196 for succsset in obsutil.successorssets(repo, ctx.node(),
2232 for succsset in obsutil.successorssets(repo, ctx.node(),
2197 closest=opts['closest'],
2233 closest=opts['closest'],
2198 cache=cache):
2234 cache=cache):
2199 if succsset:
2235 if succsset:
2200 ui.write(' ')
2236 ui.write(' ')
2201 ui.write(node2str(succsset[0]))
2237 ui.write(node2str(succsset[0]))
2202 for node in succsset[1:]:
2238 for node in succsset[1:]:
2203 ui.write(' ')
2239 ui.write(' ')
2204 ui.write(node2str(node))
2240 ui.write(node2str(node))
2205 ui.write('\n')
2241 ui.write('\n')
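# Illustrative usage sketch (REV is a placeholder): list the successors sets
# of a revision, optionally restricted to the closest successors:
#   hg debugsuccessorssets REV
#   hg debugsuccessorssets --closest REV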
2206
2242
2207 @command('debugtemplate',
2243 @command('debugtemplate',
2208 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2244 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2209 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2245 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2210 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2246 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2211 optionalrepo=True)
2247 optionalrepo=True)
2212 def debugtemplate(ui, repo, tmpl, **opts):
2248 def debugtemplate(ui, repo, tmpl, **opts):
2213 """parse and apply a template
2249 """parse and apply a template
2214
2250
2215 If -r/--rev is given, the template is processed as a log template and
2251 If -r/--rev is given, the template is processed as a log template and
2216 applied to the given changesets. Otherwise, it is processed as a generic
2252 applied to the given changesets. Otherwise, it is processed as a generic
2217 template.
2253 template.
2218
2254
2219 Use --verbose to print the parsed tree.
2255 Use --verbose to print the parsed tree.
2220 """
2256 """
2221 revs = None
2257 revs = None
2222 if opts[r'rev']:
2258 if opts[r'rev']:
2223 if repo is None:
2259 if repo is None:
2224 raise error.RepoError(_('there is no Mercurial repository here '
2260 raise error.RepoError(_('there is no Mercurial repository here '
2225 '(.hg not found)'))
2261 '(.hg not found)'))
2226 revs = scmutil.revrange(repo, opts[r'rev'])
2262 revs = scmutil.revrange(repo, opts[r'rev'])
2227
2263
2228 props = {}
2264 props = {}
2229 for d in opts[r'define']:
2265 for d in opts[r'define']:
2230 try:
2266 try:
2231 k, v = (e.strip() for e in d.split('=', 1))
2267 k, v = (e.strip() for e in d.split('=', 1))
2232 if not k or k == 'ui':
2268 if not k or k == 'ui':
2233 raise ValueError
2269 raise ValueError
2234 props[k] = v
2270 props[k] = v
2235 except ValueError:
2271 except ValueError:
2236 raise error.Abort(_('malformed keyword definition: %s') % d)
2272 raise error.Abort(_('malformed keyword definition: %s') % d)
2237
2273
2238 if ui.verbose:
2274 if ui.verbose:
2239 aliases = ui.configitems('templatealias')
2275 aliases = ui.configitems('templatealias')
2240 tree = templater.parse(tmpl)
2276 tree = templater.parse(tmpl)
2241 ui.note(templater.prettyformat(tree), '\n')
2277 ui.note(templater.prettyformat(tree), '\n')
2242 newtree = templater.expandaliases(tree, aliases)
2278 newtree = templater.expandaliases(tree, aliases)
2243 if newtree != tree:
2279 if newtree != tree:
2244 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2280 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2245
2281
2246 if revs is None:
2282 if revs is None:
2247 t = formatter.maketemplater(ui, tmpl)
2283 t = formatter.maketemplater(ui, tmpl)
2248 props['ui'] = ui
2284 props['ui'] = ui
2249 ui.write(t.render(props))
2285 ui.write(t.render(props))
2250 else:
2286 else:
2251 displayer = cmdutil.makelogtemplater(ui, repo, tmpl)
2287 displayer = cmdutil.makelogtemplater(ui, repo, tmpl)
2252 for r in revs:
2288 for r in revs:
2253 displayer.show(repo[r], **pycompat.strkwargs(props))
2289 displayer.show(repo[r], **pycompat.strkwargs(props))
2254 displayer.close()
2290 displayer.close()
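# Illustrative usage sketch (the keyword name and value are made up for
# illustration): render a log template for the working directory parent,
# injecting an extra keyword defined on the command line:
#   hg debugtemplate -r . -D extra=demo '{rev}:{node|short} {extra}\n'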
2255
2291
2256 @command('debugupdatecaches', [])
2292 @command('debugupdatecaches', [])
2257 def debugupdatecaches(ui, repo, *pats, **opts):
2293 def debugupdatecaches(ui, repo, *pats, **opts):
2258 """warm all known caches in the repository"""
2294 """warm all known caches in the repository"""
2259 with repo.wlock(), repo.lock():
2295 with repo.wlock(), repo.lock():
2260 repo.updatecaches()
2296 repo.updatecaches()
2261
2297
2262 @command('debugupgraderepo', [
2298 @command('debugupgraderepo', [
2263 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2299 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2264 ('', 'run', False, _('performs an upgrade')),
2300 ('', 'run', False, _('performs an upgrade')),
2265 ])
2301 ])
2266 def debugupgraderepo(ui, repo, run=False, optimize=None):
2302 def debugupgraderepo(ui, repo, run=False, optimize=None):
2267 """upgrade a repository to use different features
2303 """upgrade a repository to use different features
2268
2304
2269 If no arguments are specified, the repository is evaluated for upgrade
2305 If no arguments are specified, the repository is evaluated for upgrade
2270 and a list of problems and potential optimizations is printed.
2306 and a list of problems and potential optimizations is printed.
2271
2307
2272 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2308 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2273 can be influenced via additional arguments. More details will be provided
2309 can be influenced via additional arguments. More details will be provided
2274 by the command output when run without ``--run``.
2310 by the command output when run without ``--run``.
2275
2311
2276 During the upgrade, the repository will be locked and no writes will be
2312 During the upgrade, the repository will be locked and no writes will be
2277 allowed.
2313 allowed.
2278
2314
2279 At the end of the upgrade, the repository may not be readable while new
2315 At the end of the upgrade, the repository may not be readable while new
2280 repository data is swapped in. This window will be as long as it takes to
2316 repository data is swapped in. This window will be as long as it takes to
2281 rename some directories inside the ``.hg`` directory. On most machines, this
2317 rename some directories inside the ``.hg`` directory. On most machines, this
2282 should complete almost instantaneously and the chances of a consumer being
2318 should complete almost instantaneously and the chances of a consumer being
2283 unable to access the repository should be low.
2319 unable to access the repository should be low.
2284 """
2320 """
2285 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2321 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
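# Illustrative usage sketch: report what an upgrade would change, then perform
# it; the optimization NAME placeholder comes from that report:
#   hg debugupgraderepo
#   hg debugupgraderepo --run -o NAME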
2286
2322
2287 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2323 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2288 inferrepo=True)
2324 inferrepo=True)
2289 def debugwalk(ui, repo, *pats, **opts):
2325 def debugwalk(ui, repo, *pats, **opts):
2290 """show how files match on given patterns"""
2326 """show how files match on given patterns"""
2291 opts = pycompat.byteskwargs(opts)
2327 opts = pycompat.byteskwargs(opts)
2292 m = scmutil.match(repo[None], pats, opts)
2328 m = scmutil.match(repo[None], pats, opts)
2293 ui.write(('matcher: %r\n' % m))
2329 ui.write(('matcher: %r\n' % m))
2294 items = list(repo[None].walk(m))
2330 items = list(repo[None].walk(m))
2295 if not items:
2331 if not items:
2296 return
2332 return
2297 f = lambda fn: fn
2333 f = lambda fn: fn
2298 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2334 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2299 f = lambda fn: util.normpath(fn)
2335 f = lambda fn: util.normpath(fn)
2300 fmt = 'f %%-%ds %%-%ds %%s' % (
2336 fmt = 'f %%-%ds %%-%ds %%s' % (
2301 max([len(abs) for abs in items]),
2337 max([len(abs) for abs in items]),
2302 max([len(m.rel(abs)) for abs in items]))
2338 max([len(m.rel(abs)) for abs in items]))
2303 for abs in items:
2339 for abs in items:
2304 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2340 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2305 ui.write("%s\n" % line.rstrip())
2341 ui.write("%s\n" % line.rstrip())
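# Illustrative usage sketch (the pattern is a placeholder): show how an
# include pattern resolves against files in the working directory:
#   hg debugwalk -I 'glob:**.py' .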
2306
2342
2307 @command('debugwireargs',
2343 @command('debugwireargs',
2308 [('', 'three', '', 'three'),
2344 [('', 'three', '', 'three'),
2309 ('', 'four', '', 'four'),
2345 ('', 'four', '', 'four'),
2310 ('', 'five', '', 'five'),
2346 ('', 'five', '', 'five'),
2311 ] + cmdutil.remoteopts,
2347 ] + cmdutil.remoteopts,
2312 _('REPO [OPTIONS]... [ONE [TWO]]'),
2348 _('REPO [OPTIONS]... [ONE [TWO]]'),
2313 norepo=True)
2349 norepo=True)
2314 def debugwireargs(ui, repopath, *vals, **opts):
2350 def debugwireargs(ui, repopath, *vals, **opts):
2315 opts = pycompat.byteskwargs(opts)
2351 opts = pycompat.byteskwargs(opts)
2316 repo = hg.peer(ui, opts, repopath)
2352 repo = hg.peer(ui, opts, repopath)
2317 for opt in cmdutil.remoteopts:
2353 for opt in cmdutil.remoteopts:
2318 del opts[opt[1]]
2354 del opts[opt[1]]
2319 args = {}
2355 args = {}
2320 for k, v in opts.iteritems():
2356 for k, v in opts.iteritems():
2321 if v:
2357 if v:
2322 args[k] = v
2358 args[k] = v
2323 # run twice to check that we don't mess up the stream for the next command
2359 # run twice to check that we don't mess up the stream for the next command
2324 res1 = repo.debugwireargs(*vals, **args)
2360 res1 = repo.debugwireargs(*vals, **args)
2325 res2 = repo.debugwireargs(*vals, **args)
2361 res2 = repo.debugwireargs(*vals, **args)
2326 ui.write("%s\n" % res1)
2362 ui.write("%s\n" % res1)
2327 if res1 != res2:
2363 if res1 != res2:
2328 ui.warn("%s\n" % res2)
2364 ui.warn("%s\n" % res2)
@@ -1,192 +1,226 b''
1 $ cat << EOF >> $HGRCPATH
1 $ cat << EOF >> $HGRCPATH
2 > [format]
2 > [format]
3 > usegeneraldelta=yes
3 > usegeneraldelta=yes
4 > EOF
4 > EOF
5
5
6 $ hg init debugrevlog
6 $ hg init debugrevlog
7 $ cd debugrevlog
7 $ cd debugrevlog
8 $ echo a > a
8 $ echo a > a
9 $ hg ci -Am adda
9 $ hg ci -Am adda
10 adding a
10 adding a
11 $ hg debugrevlog -m
11 $ hg debugrevlog -m
12 format : 1
12 format : 1
13 flags : inline, generaldelta
13 flags : inline, generaldelta
14
14
15 revisions : 1
15 revisions : 1
16 merges : 0 ( 0.00%)
16 merges : 0 ( 0.00%)
17 normal : 1 (100.00%)
17 normal : 1 (100.00%)
18 revisions : 1
18 revisions : 1
19 full : 1 (100.00%)
19 full : 1 (100.00%)
20 deltas : 0 ( 0.00%)
20 deltas : 0 ( 0.00%)
21 revision size : 44
21 revision size : 44
22 full : 44 (100.00%)
22 full : 44 (100.00%)
23 deltas : 0 ( 0.00%)
23 deltas : 0 ( 0.00%)
24
24
25 chunks : 1
25 chunks : 1
26 0x75 (u) : 1 (100.00%)
26 0x75 (u) : 1 (100.00%)
27 chunks size : 44
27 chunks size : 44
28 0x75 (u) : 44 (100.00%)
28 0x75 (u) : 44 (100.00%)
29
29
30 avg chain length : 0
30 avg chain length : 0
31 max chain length : 0
31 max chain length : 0
32 max chain reach : 44
32 max chain reach : 44
33 compression ratio : 0
33 compression ratio : 0
34
34
35 uncompressed data size (min/max/avg) : 43 / 43 / 43
35 uncompressed data size (min/max/avg) : 43 / 43 / 43
36 full revision size (min/max/avg) : 44 / 44 / 44
36 full revision size (min/max/avg) : 44 / 44 / 44
37 delta size (min/max/avg) : 0 / 0 / 0
37 delta size (min/max/avg) : 0 / 0 / 0
38
38
39 Test debugindex, with and without the --debug flag
39 Test debugindex, with and without the --debug flag
40 $ hg debugindex a
40 $ hg debugindex a
41 rev offset length ..... linkrev nodeid p1 p2 (re)
41 rev offset length ..... linkrev nodeid p1 p2 (re)
42 0 0 3 .... 0 b789fdd96dc2 000000000000 000000000000 (re)
42 0 0 3 .... 0 b789fdd96dc2 000000000000 000000000000 (re)
43 $ hg --debug debugindex a
43 $ hg --debug debugindex a
44 rev offset length ..... linkrev nodeid p1 p2 (re)
44 rev offset length ..... linkrev nodeid p1 p2 (re)
45 0 0 3 .... 0 b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 (re)
45 0 0 3 .... 0 b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 (re)
46 $ hg debugindex -f 1 a
46 $ hg debugindex -f 1 a
47 rev flag offset length size ..... link p1 p2 nodeid (re)
47 rev flag offset length size ..... link p1 p2 nodeid (re)
48 0 0000 0 3 2 .... 0 -1 -1 b789fdd96dc2 (re)
48 0 0000 0 3 2 .... 0 -1 -1 b789fdd96dc2 (re)
49 $ hg --debug debugindex -f 1 a
49 $ hg --debug debugindex -f 1 a
50 rev flag offset length size ..... link p1 p2 nodeid (re)
50 rev flag offset length size ..... link p1 p2 nodeid (re)
51 0 0000 0 3 2 .... 0 -1 -1 b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 (re)
51 0 0000 0 3 2 .... 0 -1 -1 b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 (re)
52
52
53 debugdelta chain basic output
53 debugdelta chain basic output
54
54
55 $ hg debugdeltachain -m
55 $ hg debugdeltachain -m
56 rev chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio
56 rev chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio
57 0 1 1 -1 base 44 43 44 1.02326 44 0 0.00000
57 0 1 1 -1 base 44 43 44 1.02326 44 0 0.00000
58
58
59 $ hg debugdeltachain -m -T '{rev} {chainid} {chainlen}\n'
59 $ hg debugdeltachain -m -T '{rev} {chainid} {chainlen}\n'
60 0 1 1
60 0 1 1
61
61
62 $ hg debugdeltachain -m -Tjson
62 $ hg debugdeltachain -m -Tjson
63 [
63 [
64 {
64 {
65 "chainid": 1,
65 "chainid": 1,
66 "chainlen": 1,
66 "chainlen": 1,
67 "chainratio": 1.02325581395,
67 "chainratio": 1.02325581395,
68 "chainsize": 44,
68 "chainsize": 44,
69 "compsize": 44,
69 "compsize": 44,
70 "deltatype": "base",
70 "deltatype": "base",
71 "extradist": 0,
71 "extradist": 0,
72 "extraratio": 0.0,
72 "extraratio": 0.0,
73 "lindist": 44,
73 "lindist": 44,
74 "prevrev": -1,
74 "prevrev": -1,
75 "rev": 0,
75 "rev": 0,
76 "uncompsize": 43
76 "uncompsize": 43
77 }
77 }
78 ]
78 ]
79
79
80 debugdelta chain with sparse read enabled
81
82 $ cat >> $HGRCPATH <<EOF
83 > [experimental]
84 > sparse-read = True
85 > EOF
86 $ hg debugdeltachain -m
87 rev chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio readsize largestblk rddensity
88 0 1 1 -1 base 44 43 44 1.02326 44 0 0.00000 44 44 1.00000
89
90 $ hg debugdeltachain -m -T '{rev} {chainid} {chainlen} {readsize} {largestblock} {readdensity}\n'
91 0 1 1 44 44 1.0
92
93 $ hg debugdeltachain -m -Tjson
94 [
95 {
96 "chainid": 1,
97 "chainlen": 1,
98 "chainratio": 1.02325581395,
99 "chainsize": 44,
100 "compsize": 44,
101 "deltatype": "base",
102 "extradist": 0,
103 "extraratio": 0.0,
104 "largestblock": 44,
105 "lindist": 44,
106 "prevrev": -1,
107 "readdensity": 1.0,
108 "readsize": 44,
109 "rev": 0,
110 "uncompsize": 43
111 }
112 ]
113
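The extra columns above come from sparse read support: readsize is the total
number of bytes read from disk to reconstruct a revision, largestblk the
largest single block read, and rddensity the share of the bytes read that
belong to the delta chain.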
80 Test max chain len
114 Test max chain len
81 $ cat >> $HGRCPATH << EOF
115 $ cat >> $HGRCPATH << EOF
82 > [format]
116 > [format]
83 > maxchainlen=4
117 > maxchainlen=4
84 > EOF
118 > EOF
85
119
86 $ printf "This test checks if maxchainlen config value is respected also it can serve as basic test for debugrevlog -d <file>.\n" >> a
120 $ printf "This test checks if maxchainlen config value is respected also it can serve as basic test for debugrevlog -d <file>.\n" >> a
87 $ hg ci -m a
121 $ hg ci -m a
88 $ printf "b\n" >> a
122 $ printf "b\n" >> a
89 $ hg ci -m a
123 $ hg ci -m a
90 $ printf "c\n" >> a
124 $ printf "c\n" >> a
91 $ hg ci -m a
125 $ hg ci -m a
92 $ printf "d\n" >> a
126 $ printf "d\n" >> a
93 $ hg ci -m a
127 $ hg ci -m a
94 $ printf "e\n" >> a
128 $ printf "e\n" >> a
95 $ hg ci -m a
129 $ hg ci -m a
96 $ printf "f\n" >> a
130 $ printf "f\n" >> a
97 $ hg ci -m a
131 $ hg ci -m a
98 $ printf 'g\n' >> a
132 $ printf 'g\n' >> a
99 $ hg ci -m a
133 $ hg ci -m a
100 $ printf 'h\n' >> a
134 $ printf 'h\n' >> a
101 $ hg ci -m a
135 $ hg ci -m a
102 $ hg debugrevlog -d a
136 $ hg debugrevlog -d a
103 # rev p1rev p2rev start end deltastart base p1 p2 rawsize totalsize compression heads chainlen
137 # rev p1rev p2rev start end deltastart base p1 p2 rawsize totalsize compression heads chainlen
104 0 -1 -1 0 ??? 0 0 0 0 ??? ???? ? 1 0 (glob)
138 0 -1 -1 0 ??? 0 0 0 0 ??? ???? ? 1 0 (glob)
105 1 0 -1 ??? ??? 0 0 0 0 ??? ???? ? 1 1 (glob)
139 1 0 -1 ??? ??? 0 0 0 0 ??? ???? ? 1 1 (glob)
106 2 1 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 2 (glob)
140 2 1 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 2 (glob)
107 3 2 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 3 (glob)
141 3 2 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 3 (glob)
108 4 3 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 4 (glob)
142 4 3 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 4 (glob)
109 5 4 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 0 (glob)
143 5 4 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 0 (glob)
110 6 5 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 1 (glob)
144 6 5 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 1 (glob)
111 7 6 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 2 (glob)
145 7 6 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 2 (glob)
112 8 7 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 3 (glob)
146 8 7 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 3 (glob)
113
147
114 Test WdirUnsupported exception
148 Test WdirUnsupported exception
115
149
116 $ hg debugdata -c ffffffffffffffffffffffffffffffffffffffff
150 $ hg debugdata -c ffffffffffffffffffffffffffffffffffffffff
117 abort: working directory revision cannot be specified
151 abort: working directory revision cannot be specified
118 [255]
152 [255]
119
153
120 Test cache warming command
154 Test cache warming command
121
155
122 $ rm -rf .hg/cache/
156 $ rm -rf .hg/cache/
123 $ hg debugupdatecaches --debug
157 $ hg debugupdatecaches --debug
124 updating the branch cache
158 updating the branch cache
125 $ ls -r .hg/cache/*
159 $ ls -r .hg/cache/*
126 .hg/cache/rbc-revs-v1
160 .hg/cache/rbc-revs-v1
127 .hg/cache/rbc-names-v1
161 .hg/cache/rbc-names-v1
128 .hg/cache/branch2-served
162 .hg/cache/branch2-served
129
163
130 $ cd ..
164 $ cd ..
131
165
132 Test internal debugstacktrace command
166 Test internal debugstacktrace command
133
167
134 $ cat > debugstacktrace.py << EOF
168 $ cat > debugstacktrace.py << EOF
135 > from __future__ import absolute_import
169 > from __future__ import absolute_import
136 > import sys
170 > import sys
137 > from mercurial import util
171 > from mercurial import util
138 > def f():
172 > def f():
139 > util.debugstacktrace(f=sys.stdout)
173 > util.debugstacktrace(f=sys.stdout)
140 > g()
174 > g()
141 > def g():
175 > def g():
142 > util.dst('hello from g\\n', skip=1)
176 > util.dst('hello from g\\n', skip=1)
143 > h()
177 > h()
144 > def h():
178 > def h():
145 > util.dst('hi ...\\nfrom h hidden in g', 1, depth=2)
179 > util.dst('hi ...\\nfrom h hidden in g', 1, depth=2)
146 > f()
180 > f()
147 > EOF
181 > EOF
148 $ $PYTHON debugstacktrace.py
182 $ $PYTHON debugstacktrace.py
149 stacktrace at:
183 stacktrace at:
150 debugstacktrace.py:12 in * (glob)
184 debugstacktrace.py:12 in * (glob)
151 debugstacktrace.py:5 in f
185 debugstacktrace.py:5 in f
152 hello from g at:
186 hello from g at:
153 debugstacktrace.py:12 in * (glob)
187 debugstacktrace.py:12 in * (glob)
154 debugstacktrace.py:6 in f
188 debugstacktrace.py:6 in f
155 hi ...
189 hi ...
156 from h hidden in g at:
190 from h hidden in g at:
157 debugstacktrace.py:6 in f
191 debugstacktrace.py:6 in f
158 debugstacktrace.py:9 in g
192 debugstacktrace.py:9 in g
159
193
160 Test debugcapabilities command:
194 Test debugcapabilities command:
161
195
162 $ hg debugcapabilities ./debugrevlog/
196 $ hg debugcapabilities ./debugrevlog/
163 Main capabilities:
197 Main capabilities:
164 branchmap
198 branchmap
165 bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Aphases%3Dheads%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps
199 bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Aphases%3Dheads%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps
166 getbundle
200 getbundle
167 known
201 known
168 lookup
202 lookup
169 pushkey
203 pushkey
170 unbundle
204 unbundle
171 Bundle2 capabilities:
205 Bundle2 capabilities:
172 HG20
206 HG20
173 changegroup
207 changegroup
174 01
208 01
175 02
209 02
176 digests
210 digests
177 md5
211 md5
178 sha1
212 sha1
179 sha512
213 sha512
180 error
214 error
181 abort
215 abort
182 unsupportedcontent
216 unsupportedcontent
183 pushraced
217 pushraced
184 pushkey
218 pushkey
185 hgtagsfnodes
219 hgtagsfnodes
186 listkeys
220 listkeys
187 phases
221 phases
188 heads
222 heads
189 pushkey
223 pushkey
190 remote-changegroup
224 remote-changegroup
191 http
225 http
192 https
226 https