py3: use '%d' for integers rather than '%s'...
Pulkit Goyal
r35144:8f6641fa default
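Context for the change (illustration only, not part of the changeset): under PEP 461, Python 3 byte-string formatting accepts integers with '%d' but rejects them with '%s', whereas Python 2 str formatting accepted either, so integer fields such as the obsmarker version have to be formatted with '%d'. A minimal sketch, assuming Python 3.5+ and made-up values:

    # Hypothetical values standing in for the obsmarker version and data length.
    version, nbytes = 1, 86

    # '%d' formats the integer into bytes as expected.
    print(b"version: %d (%d bytes)\n" % (version, nbytes))  # b'version: 1 (86 bytes)\n'

    # '%s' raises TypeError for an int under PEP 461 (it wants bytes or __bytes__).
    try:
        b"version: %s (%d bytes)\n" % (version, nbytes)
    except TypeError as exc:
        print(exc)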
@@ -1,2364 +1,2364 @@
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import difflib
12 import difflib
13 import errno
13 import errno
14 import operator
14 import operator
15 import os
15 import os
16 import random
16 import random
17 import socket
17 import socket
18 import ssl
18 import ssl
19 import string
19 import string
20 import sys
20 import sys
21 import tempfile
21 import tempfile
22 import time
22 import time
23
23
24 from .i18n import _
24 from .i18n import _
25 from .node import (
25 from .node import (
26 bin,
26 bin,
27 hex,
27 hex,
28 nullhex,
28 nullhex,
29 nullid,
29 nullid,
30 nullrev,
30 nullrev,
31 short,
31 short,
32 )
32 )
33 from . import (
33 from . import (
34 bundle2,
34 bundle2,
35 changegroup,
35 changegroup,
36 cmdutil,
36 cmdutil,
37 color,
37 color,
38 context,
38 context,
39 dagparser,
39 dagparser,
40 dagutil,
40 dagutil,
41 encoding,
41 encoding,
42 error,
42 error,
43 exchange,
43 exchange,
44 extensions,
44 extensions,
45 filemerge,
45 filemerge,
46 fileset,
46 fileset,
47 formatter,
47 formatter,
48 hg,
48 hg,
49 localrepo,
49 localrepo,
50 lock as lockmod,
50 lock as lockmod,
51 merge as mergemod,
51 merge as mergemod,
52 obsolete,
52 obsolete,
53 obsutil,
53 obsutil,
54 phases,
54 phases,
55 policy,
55 policy,
56 pvec,
56 pvec,
57 pycompat,
57 pycompat,
58 registrar,
58 registrar,
59 repair,
59 repair,
60 revlog,
60 revlog,
61 revset,
61 revset,
62 revsetlang,
62 revsetlang,
63 scmutil,
63 scmutil,
64 setdiscovery,
64 setdiscovery,
65 simplemerge,
65 simplemerge,
66 smartset,
66 smartset,
67 sslutil,
67 sslutil,
68 streamclone,
68 streamclone,
69 templater,
69 templater,
70 treediscovery,
70 treediscovery,
71 upgrade,
71 upgrade,
72 util,
72 util,
73 vfs as vfsmod,
73 vfs as vfsmod,
74 )
74 )
75
75
76 release = lockmod.release
76 release = lockmod.release
77
77
78 command = registrar.command()
78 command = registrar.command()
79
79
80 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
80 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
81 def debugancestor(ui, repo, *args):
81 def debugancestor(ui, repo, *args):
82 """find the ancestor revision of two revisions in a given index"""
82 """find the ancestor revision of two revisions in a given index"""
83 if len(args) == 3:
83 if len(args) == 3:
84 index, rev1, rev2 = args
84 index, rev1, rev2 = args
85 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
85 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
86 lookup = r.lookup
86 lookup = r.lookup
87 elif len(args) == 2:
87 elif len(args) == 2:
88 if not repo:
88 if not repo:
89 raise error.Abort(_('there is no Mercurial repository here '
89 raise error.Abort(_('there is no Mercurial repository here '
90 '(.hg not found)'))
90 '(.hg not found)'))
91 rev1, rev2 = args
91 rev1, rev2 = args
92 r = repo.changelog
92 r = repo.changelog
93 lookup = repo.lookup
93 lookup = repo.lookup
94 else:
94 else:
95 raise error.Abort(_('either two or three arguments required'))
95 raise error.Abort(_('either two or three arguments required'))
96 a = r.ancestor(lookup(rev1), lookup(rev2))
96 a = r.ancestor(lookup(rev1), lookup(rev2))
97 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
97 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
98
98
99 @command('debugapplystreamclonebundle', [], 'FILE')
99 @command('debugapplystreamclonebundle', [], 'FILE')
100 def debugapplystreamclonebundle(ui, repo, fname):
100 def debugapplystreamclonebundle(ui, repo, fname):
101 """apply a stream clone bundle file"""
101 """apply a stream clone bundle file"""
102 f = hg.openpath(ui, fname)
102 f = hg.openpath(ui, fname)
103 gen = exchange.readbundle(ui, f, fname)
103 gen = exchange.readbundle(ui, f, fname)
104 gen.apply(repo)
104 gen.apply(repo)
105
105
106 @command('debugbuilddag',
106 @command('debugbuilddag',
107 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
107 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
108 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
108 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
109 ('n', 'new-file', None, _('add new file at each rev'))],
109 ('n', 'new-file', None, _('add new file at each rev'))],
110 _('[OPTION]... [TEXT]'))
110 _('[OPTION]... [TEXT]'))
111 def debugbuilddag(ui, repo, text=None,
111 def debugbuilddag(ui, repo, text=None,
112 mergeable_file=False,
112 mergeable_file=False,
113 overwritten_file=False,
113 overwritten_file=False,
114 new_file=False):
114 new_file=False):
115 """builds a repo with a given DAG from scratch in the current empty repo
115 """builds a repo with a given DAG from scratch in the current empty repo
116
116
117 The description of the DAG is read from stdin if not given on the
117 The description of the DAG is read from stdin if not given on the
118 command line.
118 command line.
119
119
120 Elements:
120 Elements:
121
121
122 - "+n" is a linear run of n nodes based on the current default parent
122 - "+n" is a linear run of n nodes based on the current default parent
123 - "." is a single node based on the current default parent
123 - "." is a single node based on the current default parent
124 - "$" resets the default parent to null (implied at the start);
124 - "$" resets the default parent to null (implied at the start);
125 otherwise the default parent is always the last node created
125 otherwise the default parent is always the last node created
126 - "<p" sets the default parent to the backref p
126 - "<p" sets the default parent to the backref p
127 - "*p" is a fork at parent p, which is a backref
127 - "*p" is a fork at parent p, which is a backref
128 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
128 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
129 - "/p2" is a merge of the preceding node and p2
129 - "/p2" is a merge of the preceding node and p2
130 - ":tag" defines a local tag for the preceding node
130 - ":tag" defines a local tag for the preceding node
131 - "@branch" sets the named branch for subsequent nodes
131 - "@branch" sets the named branch for subsequent nodes
132 - "#...\\n" is a comment up to the end of the line
132 - "#...\\n" is a comment up to the end of the line
133
133
134 Whitespace between the above elements is ignored.
134 Whitespace between the above elements is ignored.
135
135
136 A backref is either
136 A backref is either
137
137
138 - a number n, which references the node curr-n, where curr is the current
138 - a number n, which references the node curr-n, where curr is the current
139 node, or
139 node, or
140 - the name of a local tag you placed earlier using ":tag", or
140 - the name of a local tag you placed earlier using ":tag", or
141 - empty to denote the default parent.
141 - empty to denote the default parent.
142
142
143 All string valued-elements are either strictly alphanumeric, or must
143 All string valued-elements are either strictly alphanumeric, or must
144 be enclosed in double quotes ("..."), with "\\" as escape character.
144 be enclosed in double quotes ("..."), with "\\" as escape character.
145 """
145 """
146
146
147 if text is None:
147 if text is None:
148 ui.status(_("reading DAG from stdin\n"))
148 ui.status(_("reading DAG from stdin\n"))
149 text = ui.fin.read()
149 text = ui.fin.read()
150
150
151 cl = repo.changelog
151 cl = repo.changelog
152 if len(cl) > 0:
152 if len(cl) > 0:
153 raise error.Abort(_('repository is not empty'))
153 raise error.Abort(_('repository is not empty'))
154
154
155 # determine number of revs in DAG
155 # determine number of revs in DAG
156 total = 0
156 total = 0
157 for type, data in dagparser.parsedag(text):
157 for type, data in dagparser.parsedag(text):
158 if type == 'n':
158 if type == 'n':
159 total += 1
159 total += 1
160
160
161 if mergeable_file:
161 if mergeable_file:
162 linesperrev = 2
162 linesperrev = 2
163 # make a file with k lines per rev
163 # make a file with k lines per rev
164 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
164 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
165 initialmergedlines.append("")
165 initialmergedlines.append("")
166
166
167 tags = []
167 tags = []
168
168
169 wlock = lock = tr = None
169 wlock = lock = tr = None
170 try:
170 try:
171 wlock = repo.wlock()
171 wlock = repo.wlock()
172 lock = repo.lock()
172 lock = repo.lock()
173 tr = repo.transaction("builddag")
173 tr = repo.transaction("builddag")
174
174
175 at = -1
175 at = -1
176 atbranch = 'default'
176 atbranch = 'default'
177 nodeids = []
177 nodeids = []
178 id = 0
178 id = 0
179 ui.progress(_('building'), id, unit=_('revisions'), total=total)
179 ui.progress(_('building'), id, unit=_('revisions'), total=total)
180 for type, data in dagparser.parsedag(text):
180 for type, data in dagparser.parsedag(text):
181 if type == 'n':
181 if type == 'n':
182 ui.note(('node %s\n' % str(data)))
182 ui.note(('node %s\n' % str(data)))
183 id, ps = data
183 id, ps = data
184
184
185 files = []
185 files = []
186 fctxs = {}
186 fctxs = {}
187
187
188 p2 = None
188 p2 = None
189 if mergeable_file:
189 if mergeable_file:
190 fn = "mf"
190 fn = "mf"
191 p1 = repo[ps[0]]
191 p1 = repo[ps[0]]
192 if len(ps) > 1:
192 if len(ps) > 1:
193 p2 = repo[ps[1]]
193 p2 = repo[ps[1]]
194 pa = p1.ancestor(p2)
194 pa = p1.ancestor(p2)
195 base, local, other = [x[fn].data() for x in (pa, p1,
195 base, local, other = [x[fn].data() for x in (pa, p1,
196 p2)]
196 p2)]
197 m3 = simplemerge.Merge3Text(base, local, other)
197 m3 = simplemerge.Merge3Text(base, local, other)
198 ml = [l.strip() for l in m3.merge_lines()]
198 ml = [l.strip() for l in m3.merge_lines()]
199 ml.append("")
199 ml.append("")
200 elif at > 0:
200 elif at > 0:
201 ml = p1[fn].data().split("\n")
201 ml = p1[fn].data().split("\n")
202 else:
202 else:
203 ml = initialmergedlines
203 ml = initialmergedlines
204 ml[id * linesperrev] += " r%i" % id
204 ml[id * linesperrev] += " r%i" % id
205 mergedtext = "\n".join(ml)
205 mergedtext = "\n".join(ml)
206 files.append(fn)
206 files.append(fn)
207 fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
207 fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
208
208
209 if overwritten_file:
209 if overwritten_file:
210 fn = "of"
210 fn = "of"
211 files.append(fn)
211 files.append(fn)
212 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
212 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
213
213
214 if new_file:
214 if new_file:
215 fn = "nf%i" % id
215 fn = "nf%i" % id
216 files.append(fn)
216 files.append(fn)
217 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
217 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
218 if len(ps) > 1:
218 if len(ps) > 1:
219 if not p2:
219 if not p2:
220 p2 = repo[ps[1]]
220 p2 = repo[ps[1]]
221 for fn in p2:
221 for fn in p2:
222 if fn.startswith("nf"):
222 if fn.startswith("nf"):
223 files.append(fn)
223 files.append(fn)
224 fctxs[fn] = p2[fn]
224 fctxs[fn] = p2[fn]
225
225
226 def fctxfn(repo, cx, path):
226 def fctxfn(repo, cx, path):
227 return fctxs.get(path)
227 return fctxs.get(path)
228
228
229 if len(ps) == 0 or ps[0] < 0:
229 if len(ps) == 0 or ps[0] < 0:
230 pars = [None, None]
230 pars = [None, None]
231 elif len(ps) == 1:
231 elif len(ps) == 1:
232 pars = [nodeids[ps[0]], None]
232 pars = [nodeids[ps[0]], None]
233 else:
233 else:
234 pars = [nodeids[p] for p in ps]
234 pars = [nodeids[p] for p in ps]
235 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
235 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
236 date=(id, 0),
236 date=(id, 0),
237 user="debugbuilddag",
237 user="debugbuilddag",
238 extra={'branch': atbranch})
238 extra={'branch': atbranch})
239 nodeid = repo.commitctx(cx)
239 nodeid = repo.commitctx(cx)
240 nodeids.append(nodeid)
240 nodeids.append(nodeid)
241 at = id
241 at = id
242 elif type == 'l':
242 elif type == 'l':
243 id, name = data
243 id, name = data
244 ui.note(('tag %s\n' % name))
244 ui.note(('tag %s\n' % name))
245 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
245 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
246 elif type == 'a':
246 elif type == 'a':
247 ui.note(('branch %s\n' % data))
247 ui.note(('branch %s\n' % data))
248 atbranch = data
248 atbranch = data
249 ui.progress(_('building'), id, unit=_('revisions'), total=total)
249 ui.progress(_('building'), id, unit=_('revisions'), total=total)
250 tr.close()
250 tr.close()
251
251
252 if tags:
252 if tags:
253 repo.vfs.write("localtags", "".join(tags))
253 repo.vfs.write("localtags", "".join(tags))
254 finally:
254 finally:
255 ui.progress(_('building'), None)
255 ui.progress(_('building'), None)
256 release(tr, lock, wlock)
256 release(tr, lock, wlock)
257
257
258 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
258 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
259 indent_string = ' ' * indent
259 indent_string = ' ' * indent
260 if all:
260 if all:
261 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
261 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
262 % indent_string)
262 % indent_string)
263
263
264 def showchunks(named):
264 def showchunks(named):
265 ui.write("\n%s%s\n" % (indent_string, named))
265 ui.write("\n%s%s\n" % (indent_string, named))
266 for deltadata in gen.deltaiter():
266 for deltadata in gen.deltaiter():
267 node, p1, p2, cs, deltabase, delta, flags = deltadata
267 node, p1, p2, cs, deltabase, delta, flags = deltadata
268 ui.write("%s%s %s %s %s %s %s\n" %
268 ui.write("%s%s %s %s %s %s %s\n" %
269 (indent_string, hex(node), hex(p1), hex(p2),
269 (indent_string, hex(node), hex(p1), hex(p2),
270 hex(cs), hex(deltabase), len(delta)))
270 hex(cs), hex(deltabase), len(delta)))
271
271
272 chunkdata = gen.changelogheader()
272 chunkdata = gen.changelogheader()
273 showchunks("changelog")
273 showchunks("changelog")
274 chunkdata = gen.manifestheader()
274 chunkdata = gen.manifestheader()
275 showchunks("manifest")
275 showchunks("manifest")
276 for chunkdata in iter(gen.filelogheader, {}):
276 for chunkdata in iter(gen.filelogheader, {}):
277 fname = chunkdata['filename']
277 fname = chunkdata['filename']
278 showchunks(fname)
278 showchunks(fname)
279 else:
279 else:
280 if isinstance(gen, bundle2.unbundle20):
280 if isinstance(gen, bundle2.unbundle20):
281 raise error.Abort(_('use debugbundle2 for this file'))
281 raise error.Abort(_('use debugbundle2 for this file'))
282 chunkdata = gen.changelogheader()
282 chunkdata = gen.changelogheader()
283 for deltadata in gen.deltaiter():
283 for deltadata in gen.deltaiter():
284 node, p1, p2, cs, deltabase, delta, flags = deltadata
284 node, p1, p2, cs, deltabase, delta, flags = deltadata
285 ui.write("%s%s\n" % (indent_string, hex(node)))
285 ui.write("%s%s\n" % (indent_string, hex(node)))
286
286
287 def _debugobsmarkers(ui, part, indent=0, **opts):
287 def _debugobsmarkers(ui, part, indent=0, **opts):
288 """display version and markers contained in 'data'"""
288 """display version and markers contained in 'data'"""
289 opts = pycompat.byteskwargs(opts)
289 opts = pycompat.byteskwargs(opts)
290 data = part.read()
290 data = part.read()
291 indent_string = ' ' * indent
291 indent_string = ' ' * indent
292 try:
292 try:
293 version, markers = obsolete._readmarkers(data)
293 version, markers = obsolete._readmarkers(data)
294 except error.UnknownVersion as exc:
294 except error.UnknownVersion as exc:
295 msg = "%sunsupported version: %s (%d bytes)\n"
295 msg = "%sunsupported version: %s (%d bytes)\n"
296 msg %= indent_string, exc.version, len(data)
296 msg %= indent_string, exc.version, len(data)
297 ui.write(msg)
297 ui.write(msg)
298 else:
298 else:
299 msg = "%sversion: %s (%d bytes)\n"
299 msg = "%sversion: %d (%d bytes)\n"
300 msg %= indent_string, version, len(data)
300 msg %= indent_string, version, len(data)
301 ui.write(msg)
301 ui.write(msg)
302 fm = ui.formatter('debugobsolete', opts)
302 fm = ui.formatter('debugobsolete', opts)
303 for rawmarker in sorted(markers):
303 for rawmarker in sorted(markers):
304 m = obsutil.marker(None, rawmarker)
304 m = obsutil.marker(None, rawmarker)
305 fm.startitem()
305 fm.startitem()
306 fm.plain(indent_string)
306 fm.plain(indent_string)
307 cmdutil.showmarker(fm, m)
307 cmdutil.showmarker(fm, m)
308 fm.end()
308 fm.end()
309
309
310 def _debugphaseheads(ui, data, indent=0):
310 def _debugphaseheads(ui, data, indent=0):
311 """display version and markers contained in 'data'"""
311 """display version and markers contained in 'data'"""
312 indent_string = ' ' * indent
312 indent_string = ' ' * indent
313 headsbyphase = phases.binarydecode(data)
313 headsbyphase = phases.binarydecode(data)
314 for phase in phases.allphases:
314 for phase in phases.allphases:
315 for head in headsbyphase[phase]:
315 for head in headsbyphase[phase]:
316 ui.write(indent_string)
316 ui.write(indent_string)
317 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
317 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
318
318
319 def _quasirepr(thing):
319 def _quasirepr(thing):
320 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
320 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
321 return '{%s}' % (
321 return '{%s}' % (
322 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
322 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
323 return pycompat.bytestr(repr(thing))
323 return pycompat.bytestr(repr(thing))
324
324
325 def _debugbundle2(ui, gen, all=None, **opts):
325 def _debugbundle2(ui, gen, all=None, **opts):
326 """lists the contents of a bundle2"""
326 """lists the contents of a bundle2"""
327 if not isinstance(gen, bundle2.unbundle20):
327 if not isinstance(gen, bundle2.unbundle20):
328 raise error.Abort(_('not a bundle2 file'))
328 raise error.Abort(_('not a bundle2 file'))
329 ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
329 ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
330 parttypes = opts.get(r'part_type', [])
330 parttypes = opts.get(r'part_type', [])
331 for part in gen.iterparts():
331 for part in gen.iterparts():
332 if parttypes and part.type not in parttypes:
332 if parttypes and part.type not in parttypes:
333 continue
333 continue
334 ui.write('%s -- %s\n' % (part.type, _quasirepr(part.params)))
334 ui.write('%s -- %s\n' % (part.type, _quasirepr(part.params)))
335 if part.type == 'changegroup':
335 if part.type == 'changegroup':
336 version = part.params.get('version', '01')
336 version = part.params.get('version', '01')
337 cg = changegroup.getunbundler(version, part, 'UN')
337 cg = changegroup.getunbundler(version, part, 'UN')
338 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
338 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
339 if part.type == 'obsmarkers':
339 if part.type == 'obsmarkers':
340 _debugobsmarkers(ui, part, indent=4, **opts)
340 _debugobsmarkers(ui, part, indent=4, **opts)
341 if part.type == 'phase-heads':
341 if part.type == 'phase-heads':
342 _debugphaseheads(ui, part, indent=4)
342 _debugphaseheads(ui, part, indent=4)
343
343
344 @command('debugbundle',
344 @command('debugbundle',
345 [('a', 'all', None, _('show all details')),
345 [('a', 'all', None, _('show all details')),
346 ('', 'part-type', [], _('show only the named part type')),
346 ('', 'part-type', [], _('show only the named part type')),
347 ('', 'spec', None, _('print the bundlespec of the bundle'))],
347 ('', 'spec', None, _('print the bundlespec of the bundle'))],
348 _('FILE'),
348 _('FILE'),
349 norepo=True)
349 norepo=True)
350 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
350 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
351 """lists the contents of a bundle"""
351 """lists the contents of a bundle"""
352 with hg.openpath(ui, bundlepath) as f:
352 with hg.openpath(ui, bundlepath) as f:
353 if spec:
353 if spec:
354 spec = exchange.getbundlespec(ui, f)
354 spec = exchange.getbundlespec(ui, f)
355 ui.write('%s\n' % spec)
355 ui.write('%s\n' % spec)
356 return
356 return
357
357
358 gen = exchange.readbundle(ui, f, bundlepath)
358 gen = exchange.readbundle(ui, f, bundlepath)
359 if isinstance(gen, bundle2.unbundle20):
359 if isinstance(gen, bundle2.unbundle20):
360 return _debugbundle2(ui, gen, all=all, **opts)
360 return _debugbundle2(ui, gen, all=all, **opts)
361 _debugchangegroup(ui, gen, all=all, **opts)
361 _debugchangegroup(ui, gen, all=all, **opts)
362
362
363 @command('debugcapabilities',
363 @command('debugcapabilities',
364 [], _('PATH'),
364 [], _('PATH'),
365 norepo=True)
365 norepo=True)
366 def debugcapabilities(ui, path, **opts):
366 def debugcapabilities(ui, path, **opts):
367 """lists the capabilities of a remote peer"""
367 """lists the capabilities of a remote peer"""
368 peer = hg.peer(ui, opts, path)
368 peer = hg.peer(ui, opts, path)
369 caps = peer.capabilities()
369 caps = peer.capabilities()
370 ui.write(('Main capabilities:\n'))
370 ui.write(('Main capabilities:\n'))
371 for c in sorted(caps):
371 for c in sorted(caps):
372 ui.write((' %s\n') % c)
372 ui.write((' %s\n') % c)
373 b2caps = bundle2.bundle2caps(peer)
373 b2caps = bundle2.bundle2caps(peer)
374 if b2caps:
374 if b2caps:
375 ui.write(('Bundle2 capabilities:\n'))
375 ui.write(('Bundle2 capabilities:\n'))
376 for key, values in sorted(b2caps.iteritems()):
376 for key, values in sorted(b2caps.iteritems()):
377 ui.write((' %s\n') % key)
377 ui.write((' %s\n') % key)
378 for v in values:
378 for v in values:
379 ui.write((' %s\n') % v)
379 ui.write((' %s\n') % v)
380
380
381 @command('debugcheckstate', [], '')
381 @command('debugcheckstate', [], '')
382 def debugcheckstate(ui, repo):
382 def debugcheckstate(ui, repo):
383 """validate the correctness of the current dirstate"""
383 """validate the correctness of the current dirstate"""
384 parent1, parent2 = repo.dirstate.parents()
384 parent1, parent2 = repo.dirstate.parents()
385 m1 = repo[parent1].manifest()
385 m1 = repo[parent1].manifest()
386 m2 = repo[parent2].manifest()
386 m2 = repo[parent2].manifest()
387 errors = 0
387 errors = 0
388 for f in repo.dirstate:
388 for f in repo.dirstate:
389 state = repo.dirstate[f]
389 state = repo.dirstate[f]
390 if state in "nr" and f not in m1:
390 if state in "nr" and f not in m1:
391 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
391 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
392 errors += 1
392 errors += 1
393 if state in "a" and f in m1:
393 if state in "a" and f in m1:
394 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
394 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
395 errors += 1
395 errors += 1
396 if state in "m" and f not in m1 and f not in m2:
396 if state in "m" and f not in m1 and f not in m2:
397 ui.warn(_("%s in state %s, but not in either manifest\n") %
397 ui.warn(_("%s in state %s, but not in either manifest\n") %
398 (f, state))
398 (f, state))
399 errors += 1
399 errors += 1
400 for f in m1:
400 for f in m1:
401 state = repo.dirstate[f]
401 state = repo.dirstate[f]
402 if state not in "nrm":
402 if state not in "nrm":
403 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
403 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
404 errors += 1
404 errors += 1
405 if errors:
405 if errors:
406 error = _(".hg/dirstate inconsistent with current parent's manifest")
406 error = _(".hg/dirstate inconsistent with current parent's manifest")
407 raise error.Abort(error)
407 raise error.Abort(error)
408
408
409 @command('debugcolor',
409 @command('debugcolor',
410 [('', 'style', None, _('show all configured styles'))],
410 [('', 'style', None, _('show all configured styles'))],
411 'hg debugcolor')
411 'hg debugcolor')
412 def debugcolor(ui, repo, **opts):
412 def debugcolor(ui, repo, **opts):
413 """show available color, effects or style"""
413 """show available color, effects or style"""
414 ui.write(('color mode: %s\n') % ui._colormode)
414 ui.write(('color mode: %s\n') % ui._colormode)
415 if opts.get(r'style'):
415 if opts.get(r'style'):
416 return _debugdisplaystyle(ui)
416 return _debugdisplaystyle(ui)
417 else:
417 else:
418 return _debugdisplaycolor(ui)
418 return _debugdisplaycolor(ui)
419
419
420 def _debugdisplaycolor(ui):
420 def _debugdisplaycolor(ui):
421 ui = ui.copy()
421 ui = ui.copy()
422 ui._styles.clear()
422 ui._styles.clear()
423 for effect in color._activeeffects(ui).keys():
423 for effect in color._activeeffects(ui).keys():
424 ui._styles[effect] = effect
424 ui._styles[effect] = effect
425 if ui._terminfoparams:
425 if ui._terminfoparams:
426 for k, v in ui.configitems('color'):
426 for k, v in ui.configitems('color'):
427 if k.startswith('color.'):
427 if k.startswith('color.'):
428 ui._styles[k] = k[6:]
428 ui._styles[k] = k[6:]
429 elif k.startswith('terminfo.'):
429 elif k.startswith('terminfo.'):
430 ui._styles[k] = k[9:]
430 ui._styles[k] = k[9:]
431 ui.write(_('available colors:\n'))
431 ui.write(_('available colors:\n'))
432 # sort label with a '_' after the other to group '_background' entry.
432 # sort label with a '_' after the other to group '_background' entry.
433 items = sorted(ui._styles.items(),
433 items = sorted(ui._styles.items(),
434 key=lambda i: ('_' in i[0], i[0], i[1]))
434 key=lambda i: ('_' in i[0], i[0], i[1]))
435 for colorname, label in items:
435 for colorname, label in items:
436 ui.write(('%s\n') % colorname, label=label)
436 ui.write(('%s\n') % colorname, label=label)
437
437
438 def _debugdisplaystyle(ui):
438 def _debugdisplaystyle(ui):
439 ui.write(_('available style:\n'))
439 ui.write(_('available style:\n'))
440 width = max(len(s) for s in ui._styles)
440 width = max(len(s) for s in ui._styles)
441 for label, effects in sorted(ui._styles.items()):
441 for label, effects in sorted(ui._styles.items()):
442 ui.write('%s' % label, label=label)
442 ui.write('%s' % label, label=label)
443 if effects:
443 if effects:
444 # 50
444 # 50
445 ui.write(': ')
445 ui.write(': ')
446 ui.write(' ' * (max(0, width - len(label))))
446 ui.write(' ' * (max(0, width - len(label))))
447 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
447 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
448 ui.write('\n')
448 ui.write('\n')
449
449
450 @command('debugcreatestreamclonebundle', [], 'FILE')
450 @command('debugcreatestreamclonebundle', [], 'FILE')
451 def debugcreatestreamclonebundle(ui, repo, fname):
451 def debugcreatestreamclonebundle(ui, repo, fname):
452 """create a stream clone bundle file
452 """create a stream clone bundle file
453
453
454 Stream bundles are special bundles that are essentially archives of
454 Stream bundles are special bundles that are essentially archives of
455 revlog files. They are commonly used for cloning very quickly.
455 revlog files. They are commonly used for cloning very quickly.
456 """
456 """
457 # TODO we may want to turn this into an abort when this functionality
457 # TODO we may want to turn this into an abort when this functionality
458 # is moved into `hg bundle`.
458 # is moved into `hg bundle`.
459 if phases.hassecret(repo):
459 if phases.hassecret(repo):
460 ui.warn(_('(warning: stream clone bundle will contain secret '
460 ui.warn(_('(warning: stream clone bundle will contain secret '
461 'revisions)\n'))
461 'revisions)\n'))
462
462
463 requirements, gen = streamclone.generatebundlev1(repo)
463 requirements, gen = streamclone.generatebundlev1(repo)
464 changegroup.writechunks(ui, gen, fname)
464 changegroup.writechunks(ui, gen, fname)
465
465
466 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
466 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
467
467
468 @command('debugdag',
468 @command('debugdag',
469 [('t', 'tags', None, _('use tags as labels')),
469 [('t', 'tags', None, _('use tags as labels')),
470 ('b', 'branches', None, _('annotate with branch names')),
470 ('b', 'branches', None, _('annotate with branch names')),
471 ('', 'dots', None, _('use dots for runs')),
471 ('', 'dots', None, _('use dots for runs')),
472 ('s', 'spaces', None, _('separate elements by spaces'))],
472 ('s', 'spaces', None, _('separate elements by spaces'))],
473 _('[OPTION]... [FILE [REV]...]'),
473 _('[OPTION]... [FILE [REV]...]'),
474 optionalrepo=True)
474 optionalrepo=True)
475 def debugdag(ui, repo, file_=None, *revs, **opts):
475 def debugdag(ui, repo, file_=None, *revs, **opts):
476 """format the changelog or an index DAG as a concise textual description
476 """format the changelog or an index DAG as a concise textual description
477
477
478 If you pass a revlog index, the revlog's DAG is emitted. If you list
478 If you pass a revlog index, the revlog's DAG is emitted. If you list
479 revision numbers, they get labeled in the output as rN.
479 revision numbers, they get labeled in the output as rN.
480
480
481 Otherwise, the changelog DAG of the current repo is emitted.
481 Otherwise, the changelog DAG of the current repo is emitted.
482 """
482 """
483 spaces = opts.get(r'spaces')
483 spaces = opts.get(r'spaces')
484 dots = opts.get(r'dots')
484 dots = opts.get(r'dots')
485 if file_:
485 if file_:
486 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
486 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
487 file_)
487 file_)
488 revs = set((int(r) for r in revs))
488 revs = set((int(r) for r in revs))
489 def events():
489 def events():
490 for r in rlog:
490 for r in rlog:
491 yield 'n', (r, list(p for p in rlog.parentrevs(r)
491 yield 'n', (r, list(p for p in rlog.parentrevs(r)
492 if p != -1))
492 if p != -1))
493 if r in revs:
493 if r in revs:
494 yield 'l', (r, "r%i" % r)
494 yield 'l', (r, "r%i" % r)
495 elif repo:
495 elif repo:
496 cl = repo.changelog
496 cl = repo.changelog
497 tags = opts.get(r'tags')
497 tags = opts.get(r'tags')
498 branches = opts.get(r'branches')
498 branches = opts.get(r'branches')
499 if tags:
499 if tags:
500 labels = {}
500 labels = {}
501 for l, n in repo.tags().items():
501 for l, n in repo.tags().items():
502 labels.setdefault(cl.rev(n), []).append(l)
502 labels.setdefault(cl.rev(n), []).append(l)
503 def events():
503 def events():
504 b = "default"
504 b = "default"
505 for r in cl:
505 for r in cl:
506 if branches:
506 if branches:
507 newb = cl.read(cl.node(r))[5]['branch']
507 newb = cl.read(cl.node(r))[5]['branch']
508 if newb != b:
508 if newb != b:
509 yield 'a', newb
509 yield 'a', newb
510 b = newb
510 b = newb
511 yield 'n', (r, list(p for p in cl.parentrevs(r)
511 yield 'n', (r, list(p for p in cl.parentrevs(r)
512 if p != -1))
512 if p != -1))
513 if tags:
513 if tags:
514 ls = labels.get(r)
514 ls = labels.get(r)
515 if ls:
515 if ls:
516 for l in ls:
516 for l in ls:
517 yield 'l', (r, l)
517 yield 'l', (r, l)
518 else:
518 else:
519 raise error.Abort(_('need repo for changelog dag'))
519 raise error.Abort(_('need repo for changelog dag'))
520
520
521 for line in dagparser.dagtextlines(events(),
521 for line in dagparser.dagtextlines(events(),
522 addspaces=spaces,
522 addspaces=spaces,
523 wraplabels=True,
523 wraplabels=True,
524 wrapannotations=True,
524 wrapannotations=True,
525 wrapnonlinear=dots,
525 wrapnonlinear=dots,
526 usedots=dots,
526 usedots=dots,
527 maxlinewidth=70):
527 maxlinewidth=70):
528 ui.write(line)
528 ui.write(line)
529 ui.write("\n")
529 ui.write("\n")
530
530
531 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
531 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
532 def debugdata(ui, repo, file_, rev=None, **opts):
532 def debugdata(ui, repo, file_, rev=None, **opts):
533 """dump the contents of a data file revision"""
533 """dump the contents of a data file revision"""
534 opts = pycompat.byteskwargs(opts)
534 opts = pycompat.byteskwargs(opts)
535 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
535 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
536 if rev is not None:
536 if rev is not None:
537 raise error.CommandError('debugdata', _('invalid arguments'))
537 raise error.CommandError('debugdata', _('invalid arguments'))
538 file_, rev = None, file_
538 file_, rev = None, file_
539 elif rev is None:
539 elif rev is None:
540 raise error.CommandError('debugdata', _('invalid arguments'))
540 raise error.CommandError('debugdata', _('invalid arguments'))
541 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
541 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
542 try:
542 try:
543 ui.write(r.revision(r.lookup(rev), raw=True))
543 ui.write(r.revision(r.lookup(rev), raw=True))
544 except KeyError:
544 except KeyError:
545 raise error.Abort(_('invalid revision identifier %s') % rev)
545 raise error.Abort(_('invalid revision identifier %s') % rev)
546
546
547 @command('debugdate',
547 @command('debugdate',
548 [('e', 'extended', None, _('try extended date formats'))],
548 [('e', 'extended', None, _('try extended date formats'))],
549 _('[-e] DATE [RANGE]'),
549 _('[-e] DATE [RANGE]'),
550 norepo=True, optionalrepo=True)
550 norepo=True, optionalrepo=True)
551 def debugdate(ui, date, range=None, **opts):
551 def debugdate(ui, date, range=None, **opts):
552 """parse and display a date"""
552 """parse and display a date"""
553 if opts[r"extended"]:
553 if opts[r"extended"]:
554 d = util.parsedate(date, util.extendeddateformats)
554 d = util.parsedate(date, util.extendeddateformats)
555 else:
555 else:
556 d = util.parsedate(date)
556 d = util.parsedate(date)
557 ui.write(("internal: %s %s\n") % d)
557 ui.write(("internal: %s %s\n") % d)
558 ui.write(("standard: %s\n") % util.datestr(d))
558 ui.write(("standard: %s\n") % util.datestr(d))
559 if range:
559 if range:
560 m = util.matchdate(range)
560 m = util.matchdate(range)
561 ui.write(("match: %s\n") % m(d[0]))
561 ui.write(("match: %s\n") % m(d[0]))
562
562
563 @command('debugdeltachain',
563 @command('debugdeltachain',
564 cmdutil.debugrevlogopts + cmdutil.formatteropts,
564 cmdutil.debugrevlogopts + cmdutil.formatteropts,
565 _('-c|-m|FILE'),
565 _('-c|-m|FILE'),
566 optionalrepo=True)
566 optionalrepo=True)
567 def debugdeltachain(ui, repo, file_=None, **opts):
567 def debugdeltachain(ui, repo, file_=None, **opts):
568 """dump information about delta chains in a revlog
568 """dump information about delta chains in a revlog
569
569
570 Output can be templatized. Available template keywords are:
570 Output can be templatized. Available template keywords are:
571
571
572 :``rev``: revision number
572 :``rev``: revision number
573 :``chainid``: delta chain identifier (numbered by unique base)
573 :``chainid``: delta chain identifier (numbered by unique base)
574 :``chainlen``: delta chain length to this revision
574 :``chainlen``: delta chain length to this revision
575 :``prevrev``: previous revision in delta chain
575 :``prevrev``: previous revision in delta chain
576 :``deltatype``: role of delta / how it was computed
576 :``deltatype``: role of delta / how it was computed
577 :``compsize``: compressed size of revision
577 :``compsize``: compressed size of revision
578 :``uncompsize``: uncompressed size of revision
578 :``uncompsize``: uncompressed size of revision
579 :``chainsize``: total size of compressed revisions in chain
579 :``chainsize``: total size of compressed revisions in chain
580 :``chainratio``: total chain size divided by uncompressed revision size
580 :``chainratio``: total chain size divided by uncompressed revision size
581 (new delta chains typically start at ratio 2.00)
581 (new delta chains typically start at ratio 2.00)
582 :``lindist``: linear distance from base revision in delta chain to end
582 :``lindist``: linear distance from base revision in delta chain to end
583 of this revision
583 of this revision
584 :``extradist``: total size of revisions not part of this delta chain from
584 :``extradist``: total size of revisions not part of this delta chain from
585 base of delta chain to end of this revision; a measurement
585 base of delta chain to end of this revision; a measurement
586 of how much extra data we need to read/seek across to read
586 of how much extra data we need to read/seek across to read
587 the delta chain for this revision
587 the delta chain for this revision
588 :``extraratio``: extradist divided by chainsize; another representation of
588 :``extraratio``: extradist divided by chainsize; another representation of
589 how much unrelated data is needed to load this delta chain
589 how much unrelated data is needed to load this delta chain
590
590
591 If the repository is configured to use the sparse read, additional keywords
591 If the repository is configured to use the sparse read, additional keywords
592 are available:
592 are available:
593
593
594 :``readsize``: total size of data read from the disk for a revision
594 :``readsize``: total size of data read from the disk for a revision
595 (sum of the sizes of all the blocks)
595 (sum of the sizes of all the blocks)
596 :``largestblock``: size of the largest block of data read from the disk
596 :``largestblock``: size of the largest block of data read from the disk
597 :``readdensity``: density of useful bytes in the data read from the disk
597 :``readdensity``: density of useful bytes in the data read from the disk
598
598
599 The sparse read can be enabled with experimental.sparse-read = True
599 The sparse read can be enabled with experimental.sparse-read = True
600 """
600 """
601 opts = pycompat.byteskwargs(opts)
601 opts = pycompat.byteskwargs(opts)
602 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
602 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
603 index = r.index
603 index = r.index
604 generaldelta = r.version & revlog.FLAG_GENERALDELTA
604 generaldelta = r.version & revlog.FLAG_GENERALDELTA
605 withsparseread = getattr(r, '_withsparseread', False)
605 withsparseread = getattr(r, '_withsparseread', False)
606
606
607 def revinfo(rev):
607 def revinfo(rev):
608 e = index[rev]
608 e = index[rev]
609 compsize = e[1]
609 compsize = e[1]
610 uncompsize = e[2]
610 uncompsize = e[2]
611 chainsize = 0
611 chainsize = 0
612
612
613 if generaldelta:
613 if generaldelta:
614 if e[3] == e[5]:
614 if e[3] == e[5]:
615 deltatype = 'p1'
615 deltatype = 'p1'
616 elif e[3] == e[6]:
616 elif e[3] == e[6]:
617 deltatype = 'p2'
617 deltatype = 'p2'
618 elif e[3] == rev - 1:
618 elif e[3] == rev - 1:
619 deltatype = 'prev'
619 deltatype = 'prev'
620 elif e[3] == rev:
620 elif e[3] == rev:
621 deltatype = 'base'
621 deltatype = 'base'
622 else:
622 else:
623 deltatype = 'other'
623 deltatype = 'other'
624 else:
624 else:
625 if e[3] == rev:
625 if e[3] == rev:
626 deltatype = 'base'
626 deltatype = 'base'
627 else:
627 else:
628 deltatype = 'prev'
628 deltatype = 'prev'
629
629
630 chain = r._deltachain(rev)[0]
630 chain = r._deltachain(rev)[0]
631 for iterrev in chain:
631 for iterrev in chain:
632 e = index[iterrev]
632 e = index[iterrev]
633 chainsize += e[1]
633 chainsize += e[1]
634
634
635 return compsize, uncompsize, deltatype, chain, chainsize
635 return compsize, uncompsize, deltatype, chain, chainsize
636
636
637 fm = ui.formatter('debugdeltachain', opts)
637 fm = ui.formatter('debugdeltachain', opts)
638
638
639 fm.plain(' rev chain# chainlen prev delta '
639 fm.plain(' rev chain# chainlen prev delta '
640 'size rawsize chainsize ratio lindist extradist '
640 'size rawsize chainsize ratio lindist extradist '
641 'extraratio')
641 'extraratio')
642 if withsparseread:
642 if withsparseread:
643 fm.plain(' readsize largestblk rddensity')
643 fm.plain(' readsize largestblk rddensity')
644 fm.plain('\n')
644 fm.plain('\n')
645
645
646 chainbases = {}
646 chainbases = {}
647 for rev in r:
647 for rev in r:
648 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
648 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
649 chainbase = chain[0]
649 chainbase = chain[0]
650 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
650 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
651 start = r.start
651 start = r.start
652 length = r.length
652 length = r.length
653 basestart = start(chainbase)
653 basestart = start(chainbase)
654 revstart = start(rev)
654 revstart = start(rev)
655 lineardist = revstart + comp - basestart
655 lineardist = revstart + comp - basestart
656 extradist = lineardist - chainsize
656 extradist = lineardist - chainsize
657 try:
657 try:
658 prevrev = chain[-2]
658 prevrev = chain[-2]
659 except IndexError:
659 except IndexError:
660 prevrev = -1
660 prevrev = -1
661
661
662 chainratio = float(chainsize) / float(uncomp)
662 chainratio = float(chainsize) / float(uncomp)
663 extraratio = float(extradist) / float(chainsize)
663 extraratio = float(extradist) / float(chainsize)
664
664
665 fm.startitem()
665 fm.startitem()
666 fm.write('rev chainid chainlen prevrev deltatype compsize '
666 fm.write('rev chainid chainlen prevrev deltatype compsize '
667 'uncompsize chainsize chainratio lindist extradist '
667 'uncompsize chainsize chainratio lindist extradist '
668 'extraratio',
668 'extraratio',
669 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
669 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
670 rev, chainid, len(chain), prevrev, deltatype, comp,
670 rev, chainid, len(chain), prevrev, deltatype, comp,
671 uncomp, chainsize, chainratio, lineardist, extradist,
671 uncomp, chainsize, chainratio, lineardist, extradist,
672 extraratio,
672 extraratio,
673 rev=rev, chainid=chainid, chainlen=len(chain),
673 rev=rev, chainid=chainid, chainlen=len(chain),
674 prevrev=prevrev, deltatype=deltatype, compsize=comp,
674 prevrev=prevrev, deltatype=deltatype, compsize=comp,
675 uncompsize=uncomp, chainsize=chainsize,
675 uncompsize=uncomp, chainsize=chainsize,
676 chainratio=chainratio, lindist=lineardist,
676 chainratio=chainratio, lindist=lineardist,
677 extradist=extradist, extraratio=extraratio)
677 extradist=extradist, extraratio=extraratio)
678 if withsparseread:
678 if withsparseread:
679 readsize = 0
679 readsize = 0
680 largestblock = 0
680 largestblock = 0
681 for revschunk in revlog._slicechunk(r, chain):
681 for revschunk in revlog._slicechunk(r, chain):
682 blkend = start(revschunk[-1]) + length(revschunk[-1])
682 blkend = start(revschunk[-1]) + length(revschunk[-1])
683 blksize = blkend - start(revschunk[0])
683 blksize = blkend - start(revschunk[0])
684
684
685 readsize += blksize
685 readsize += blksize
686 if largestblock < blksize:
686 if largestblock < blksize:
687 largestblock = blksize
687 largestblock = blksize
688
688
689 readdensity = float(chainsize) / float(readsize)
689 readdensity = float(chainsize) / float(readsize)
690
690
691 fm.write('readsize largestblock readdensity',
691 fm.write('readsize largestblock readdensity',
692 ' %10d %10d %9.5f',
692 ' %10d %10d %9.5f',
693 readsize, largestblock, readdensity,
693 readsize, largestblock, readdensity,
694 readsize=readsize, largestblock=largestblock,
694 readsize=readsize, largestblock=largestblock,
695 readdensity=readdensity)
695 readdensity=readdensity)
696
696
697 fm.plain('\n')
697 fm.plain('\n')
698
698
699 fm.end()
699 fm.end()
700
700
701 @command('debugdirstate|debugstate',
701 @command('debugdirstate|debugstate',
702 [('', 'nodates', None, _('do not display the saved mtime')),
702 [('', 'nodates', None, _('do not display the saved mtime')),
703 ('', 'datesort', None, _('sort by saved mtime'))],
703 ('', 'datesort', None, _('sort by saved mtime'))],
704 _('[OPTION]...'))
704 _('[OPTION]...'))
705 def debugstate(ui, repo, **opts):
705 def debugstate(ui, repo, **opts):
706 """show the contents of the current dirstate"""
706 """show the contents of the current dirstate"""
707
707
708 nodates = opts.get(r'nodates')
708 nodates = opts.get(r'nodates')
709 datesort = opts.get(r'datesort')
709 datesort = opts.get(r'datesort')
710
710
711 timestr = ""
711 timestr = ""
712 if datesort:
712 if datesort:
713 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
713 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
714 else:
714 else:
715 keyfunc = None # sort by filename
715 keyfunc = None # sort by filename
716 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
716 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
717 if ent[3] == -1:
717 if ent[3] == -1:
718 timestr = 'unset '
718 timestr = 'unset '
719 elif nodates:
719 elif nodates:
720 timestr = 'set '
720 timestr = 'set '
721 else:
721 else:
722 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
722 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
723 time.localtime(ent[3]))
723 time.localtime(ent[3]))
724 if ent[1] & 0o20000:
724 if ent[1] & 0o20000:
725 mode = 'lnk'
725 mode = 'lnk'
726 else:
726 else:
727 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
727 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
728 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
728 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
729 for f in repo.dirstate.copies():
729 for f in repo.dirstate.copies():
730 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
730 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
731
731
732 @command('debugdiscovery',
732 @command('debugdiscovery',
733 [('', 'old', None, _('use old-style discovery')),
733 [('', 'old', None, _('use old-style discovery')),
734 ('', 'nonheads', None,
734 ('', 'nonheads', None,
735 _('use old-style discovery with non-heads included')),
735 _('use old-style discovery with non-heads included')),
736 ] + cmdutil.remoteopts,
736 ] + cmdutil.remoteopts,
737 _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
737 _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
738 def debugdiscovery(ui, repo, remoteurl="default", **opts):
738 def debugdiscovery(ui, repo, remoteurl="default", **opts):
739 """runs the changeset discovery protocol in isolation"""
739 """runs the changeset discovery protocol in isolation"""
740 opts = pycompat.byteskwargs(opts)
740 opts = pycompat.byteskwargs(opts)
741 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
741 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
742 opts.get('branch'))
742 opts.get('branch'))
743 remote = hg.peer(repo, opts, remoteurl)
743 remote = hg.peer(repo, opts, remoteurl)
744 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
744 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
745
745
746 # make sure tests are repeatable
746 # make sure tests are repeatable
747 random.seed(12323)
747 random.seed(12323)
748
748
749 def doit(localheads, remoteheads, remote=remote):
749 def doit(localheads, remoteheads, remote=remote):
750 if opts.get('old'):
750 if opts.get('old'):
751 if localheads:
751 if localheads:
752 raise error.Abort('cannot use localheads with old style '
752 raise error.Abort('cannot use localheads with old style '
753 'discovery')
753 'discovery')
754 if not util.safehasattr(remote, 'branches'):
754 if not util.safehasattr(remote, 'branches'):
755 # enable in-client legacy support
755 # enable in-client legacy support
756 remote = localrepo.locallegacypeer(remote.local())
756 remote = localrepo.locallegacypeer(remote.local())
757 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
757 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
758 force=True)
758 force=True)
759 common = set(common)
759 common = set(common)
760 if not opts.get('nonheads'):
760 if not opts.get('nonheads'):
761 ui.write(("unpruned common: %s\n") %
761 ui.write(("unpruned common: %s\n") %
762 " ".join(sorted(short(n) for n in common)))
762 " ".join(sorted(short(n) for n in common)))
763 dag = dagutil.revlogdag(repo.changelog)
763 dag = dagutil.revlogdag(repo.changelog)
764 all = dag.ancestorset(dag.internalizeall(common))
764 all = dag.ancestorset(dag.internalizeall(common))
765 common = dag.externalizeall(dag.headsetofconnecteds(all))
765 common = dag.externalizeall(dag.headsetofconnecteds(all))
766 else:
766 else:
767 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
767 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
768 common = set(common)
768 common = set(common)
769 rheads = set(hds)
769 rheads = set(hds)
770 lheads = set(repo.heads())
770 lheads = set(repo.heads())
771 ui.write(("common heads: %s\n") %
771 ui.write(("common heads: %s\n") %
772 " ".join(sorted(short(n) for n in common)))
772 " ".join(sorted(short(n) for n in common)))
773 if lheads <= common:
773 if lheads <= common:
774 ui.write(("local is subset\n"))
774 ui.write(("local is subset\n"))
775 elif rheads <= common:
775 elif rheads <= common:
776 ui.write(("remote is subset\n"))
776 ui.write(("remote is subset\n"))
777
777
778 serverlogs = opts.get('serverlog')
778 serverlogs = opts.get('serverlog')
779 if serverlogs:
779 if serverlogs:
780 for filename in serverlogs:
780 for filename in serverlogs:
781 with open(filename, 'r') as logfile:
781 with open(filename, 'r') as logfile:
782 line = logfile.readline()
782 line = logfile.readline()
783 while line:
783 while line:
784 parts = line.strip().split(';')
784 parts = line.strip().split(';')
785 op = parts[1]
785 op = parts[1]
786 if op == 'cg':
786 if op == 'cg':
787 pass
787 pass
788 elif op == 'cgss':
788 elif op == 'cgss':
789 doit(parts[2].split(' '), parts[3].split(' '))
789 doit(parts[2].split(' '), parts[3].split(' '))
790 elif op == 'unb':
790 elif op == 'unb':
791 doit(parts[3].split(' '), parts[2].split(' '))
791 doit(parts[3].split(' '), parts[2].split(' '))
792 line = logfile.readline()
792 line = logfile.readline()
793 else:
793 else:
794 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
794 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
795 opts.get('remote_head'))
795 opts.get('remote_head'))
796 localrevs = opts.get('local_head')
796 localrevs = opts.get('local_head')
797 doit(localrevs, remoterevs)
797 doit(localrevs, remoterevs)
798
798
799 @command('debugextensions', cmdutil.formatteropts, [], norepo=True)
799 @command('debugextensions', cmdutil.formatteropts, [], norepo=True)
800 def debugextensions(ui, **opts):
800 def debugextensions(ui, **opts):
801 '''show information about active extensions'''
801 '''show information about active extensions'''
802 opts = pycompat.byteskwargs(opts)
802 opts = pycompat.byteskwargs(opts)
803 exts = extensions.extensions(ui)
803 exts = extensions.extensions(ui)
804 hgver = util.version()
804 hgver = util.version()
805 fm = ui.formatter('debugextensions', opts)
805 fm = ui.formatter('debugextensions', opts)
806 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
806 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
807 isinternal = extensions.ismoduleinternal(extmod)
807 isinternal = extensions.ismoduleinternal(extmod)
808 extsource = pycompat.fsencode(extmod.__file__)
808 extsource = pycompat.fsencode(extmod.__file__)
809 if isinternal:
809 if isinternal:
810 exttestedwith = [] # never expose magic string to users
810 exttestedwith = [] # never expose magic string to users
811 else:
811 else:
812 exttestedwith = getattr(extmod, 'testedwith', '').split()
812 exttestedwith = getattr(extmod, 'testedwith', '').split()
813 extbuglink = getattr(extmod, 'buglink', None)
813 extbuglink = getattr(extmod, 'buglink', None)
814
814
815 fm.startitem()
815 fm.startitem()
816
816
817 if ui.quiet or ui.verbose:
817 if ui.quiet or ui.verbose:
818 fm.write('name', '%s\n', extname)
818 fm.write('name', '%s\n', extname)
819 else:
819 else:
820 fm.write('name', '%s', extname)
820 fm.write('name', '%s', extname)
821 if isinternal or hgver in exttestedwith:
821 if isinternal or hgver in exttestedwith:
822 fm.plain('\n')
822 fm.plain('\n')
823 elif not exttestedwith:
823 elif not exttestedwith:
824 fm.plain(_(' (untested!)\n'))
824 fm.plain(_(' (untested!)\n'))
825 else:
825 else:
826 lasttestedversion = exttestedwith[-1]
826 lasttestedversion = exttestedwith[-1]
827 fm.plain(' (%s!)\n' % lasttestedversion)
827 fm.plain(' (%s!)\n' % lasttestedversion)
828
828
829 fm.condwrite(ui.verbose and extsource, 'source',
829 fm.condwrite(ui.verbose and extsource, 'source',
830 _(' location: %s\n'), extsource or "")
830 _(' location: %s\n'), extsource or "")
831
831
832 if ui.verbose:
832 if ui.verbose:
833 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
833 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
834 fm.data(bundled=isinternal)
834 fm.data(bundled=isinternal)
835
835
836 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
836 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
837 _(' tested with: %s\n'),
837 _(' tested with: %s\n'),
838 fm.formatlist(exttestedwith, name='ver'))
838 fm.formatlist(exttestedwith, name='ver'))
839
839
840 fm.condwrite(ui.verbose and extbuglink, 'buglink',
840 fm.condwrite(ui.verbose and extbuglink, 'buglink',
841 _(' bug reporting: %s\n'), extbuglink or "")
841 _(' bug reporting: %s\n'), extbuglink or "")
842
842
843 fm.end()
843 fm.end()
844
844
845 @command('debugfileset',
845 @command('debugfileset',
846 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
846 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
847 _('[-r REV] FILESPEC'))
847 _('[-r REV] FILESPEC'))
848 def debugfileset(ui, repo, expr, **opts):
848 def debugfileset(ui, repo, expr, **opts):
849 '''parse and apply a fileset specification'''
849 '''parse and apply a fileset specification'''
850 ctx = scmutil.revsingle(repo, opts.get(r'rev'), None)
850 ctx = scmutil.revsingle(repo, opts.get(r'rev'), None)
851 if ui.verbose:
851 if ui.verbose:
852 tree = fileset.parse(expr)
852 tree = fileset.parse(expr)
853 ui.note(fileset.prettyformat(tree), "\n")
853 ui.note(fileset.prettyformat(tree), "\n")
854
854
855 for f in ctx.getfileset(expr):
855 for f in ctx.getfileset(expr):
856 ui.write("%s\n" % f)
856 ui.write("%s\n" % f)
857
857
858 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
858 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
859 def debugfsinfo(ui, path="."):
859 def debugfsinfo(ui, path="."):
860 """show information detected about current filesystem"""
860 """show information detected about current filesystem"""
861 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
861 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
862 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
862 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
863 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
863 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
864 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
864 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
865 casesensitive = '(unknown)'
865 casesensitive = '(unknown)'
866 try:
866 try:
867 with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
867 with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
868 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
868 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
869 except OSError:
869 except OSError:
870 pass
870 pass
871 ui.write(('case-sensitive: %s\n') % casesensitive)
871 ui.write(('case-sensitive: %s\n') % casesensitive)
872
872
873 @command('debuggetbundle',
873 @command('debuggetbundle',
874 [('H', 'head', [], _('id of head node'), _('ID')),
874 [('H', 'head', [], _('id of head node'), _('ID')),
875 ('C', 'common', [], _('id of common node'), _('ID')),
875 ('C', 'common', [], _('id of common node'), _('ID')),
876 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
876 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
877 _('REPO FILE [-H|-C ID]...'),
877 _('REPO FILE [-H|-C ID]...'),
878 norepo=True)
878 norepo=True)
879 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
879 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
880 """retrieves a bundle from a repo
880 """retrieves a bundle from a repo
881
881
882 Every ID must be a full-length hex node id string. Saves the bundle to the
882 Every ID must be a full-length hex node id string. Saves the bundle to the
883 given file.
883 given file.
884 """
884 """
885 opts = pycompat.byteskwargs(opts)
885 opts = pycompat.byteskwargs(opts)
886 repo = hg.peer(ui, opts, repopath)
886 repo = hg.peer(ui, opts, repopath)
887 if not repo.capable('getbundle'):
887 if not repo.capable('getbundle'):
888 raise error.Abort("getbundle() not supported by target repository")
888 raise error.Abort("getbundle() not supported by target repository")
889 args = {}
889 args = {}
890 if common:
890 if common:
891 args[r'common'] = [bin(s) for s in common]
891 args[r'common'] = [bin(s) for s in common]
892 if head:
892 if head:
893 args[r'heads'] = [bin(s) for s in head]
893 args[r'heads'] = [bin(s) for s in head]
894 # TODO: get desired bundlecaps from command line.
894 # TODO: get desired bundlecaps from command line.
895 args[r'bundlecaps'] = None
895 args[r'bundlecaps'] = None
896 bundle = repo.getbundle('debug', **args)
896 bundle = repo.getbundle('debug', **args)
897
897
898 bundletype = opts.get('type', 'bzip2').lower()
898 bundletype = opts.get('type', 'bzip2').lower()
899 btypes = {'none': 'HG10UN',
899 btypes = {'none': 'HG10UN',
900 'bzip2': 'HG10BZ',
900 'bzip2': 'HG10BZ',
901 'gzip': 'HG10GZ',
901 'gzip': 'HG10GZ',
902 'bundle2': 'HG20'}
902 'bundle2': 'HG20'}
903 bundletype = btypes.get(bundletype)
903 bundletype = btypes.get(bundletype)
904 if bundletype not in bundle2.bundletypes:
904 if bundletype not in bundle2.bundletypes:
905 raise error.Abort(_('unknown bundle type specified with --type'))
905 raise error.Abort(_('unknown bundle type specified with --type'))
906 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
906 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
907
907
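# Editorial example (not part of debugcommands.py): fetching a bundle from a
# peer with the command defined above. The URL and node id are hypothetical;
# ids must be full 40-character hex strings, and -t accepts none, bzip2,
# gzip, or bundle2 (see the btypes table).
#
#   $ hg debuggetbundle http://example.com/repo out.hg \
#         -H 0123456789abcdef0123456789abcdef01234567 -t bundle2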
908 @command('debugignore', [], '[FILE]')
908 @command('debugignore', [], '[FILE]')
909 def debugignore(ui, repo, *files, **opts):
909 def debugignore(ui, repo, *files, **opts):
910 """display the combined ignore pattern and information about ignored files
910 """display the combined ignore pattern and information about ignored files
911
911
912 With no argument display the combined ignore pattern.
912 With no argument display the combined ignore pattern.
913
913
914 Given space-separated file names, shows if the given file is ignored and
914 Given space-separated file names, shows if the given file is ignored and
915 if so, shows the ignore rule (file and line number) that matched it.
915 if so, shows the ignore rule (file and line number) that matched it.
916 """
916 """
917 ignore = repo.dirstate._ignore
917 ignore = repo.dirstate._ignore
918 if not files:
918 if not files:
919 # Show all the patterns
919 # Show all the patterns
920 ui.write("%s\n" % repr(ignore))
920 ui.write("%s\n" % repr(ignore))
921 else:
921 else:
922 m = scmutil.match(repo[None], pats=files)
922 m = scmutil.match(repo[None], pats=files)
923 for f in m.files():
923 for f in m.files():
924 nf = util.normpath(f)
924 nf = util.normpath(f)
925 ignored = None
925 ignored = None
926 ignoredata = None
926 ignoredata = None
927 if nf != '.':
927 if nf != '.':
928 if ignore(nf):
928 if ignore(nf):
929 ignored = nf
929 ignored = nf
930 ignoredata = repo.dirstate._ignorefileandline(nf)
930 ignoredata = repo.dirstate._ignorefileandline(nf)
931 else:
931 else:
932 for p in util.finddirs(nf):
932 for p in util.finddirs(nf):
933 if ignore(p):
933 if ignore(p):
934 ignored = p
934 ignored = p
935 ignoredata = repo.dirstate._ignorefileandline(p)
935 ignoredata = repo.dirstate._ignorefileandline(p)
936 break
936 break
937 if ignored:
937 if ignored:
938 if ignored == nf:
938 if ignored == nf:
939 ui.write(_("%s is ignored\n") % m.uipath(f))
939 ui.write(_("%s is ignored\n") % m.uipath(f))
940 else:
940 else:
941 ui.write(_("%s is ignored because of "
941 ui.write(_("%s is ignored because of "
942 "containing folder %s\n")
942 "containing folder %s\n")
943 % (m.uipath(f), ignored))
943 % (m.uipath(f), ignored))
944 ignorefile, lineno, line = ignoredata
944 ignorefile, lineno, line = ignoredata
945 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
945 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
946 % (ignorefile, lineno, line))
946 % (ignorefile, lineno, line))
947 else:
947 else:
948 ui.write(_("%s is not ignored\n") % m.uipath(f))
948 ui.write(_("%s is not ignored\n") % m.uipath(f))
949
949
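# Editorial example (not part of debugcommands.py): with no arguments the
# combined ignore matcher is dumped; with a path, the matching rule is
# reported. Path, line number, and rule below are hypothetical.
#
#   $ hg debugignore build/output.o
#   build/output.o is ignored
#   (ignore rule in .hgignore, line 3: 'build')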
950 @command('debugindex', cmdutil.debugrevlogopts +
950 @command('debugindex', cmdutil.debugrevlogopts +
951 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
951 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
952 _('[-f FORMAT] -c|-m|FILE'),
952 _('[-f FORMAT] -c|-m|FILE'),
953 optionalrepo=True)
953 optionalrepo=True)
954 def debugindex(ui, repo, file_=None, **opts):
954 def debugindex(ui, repo, file_=None, **opts):
955 """dump the contents of an index file"""
955 """dump the contents of an index file"""
956 opts = pycompat.byteskwargs(opts)
956 opts = pycompat.byteskwargs(opts)
957 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
957 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
958 format = opts.get('format', 0)
958 format = opts.get('format', 0)
959 if format not in (0, 1):
959 if format not in (0, 1):
960 raise error.Abort(_("unknown format %d") % format)
960 raise error.Abort(_("unknown format %d") % format)
961
961
962 generaldelta = r.version & revlog.FLAG_GENERALDELTA
962 generaldelta = r.version & revlog.FLAG_GENERALDELTA
963 if generaldelta:
963 if generaldelta:
964 basehdr = ' delta'
964 basehdr = ' delta'
965 else:
965 else:
966 basehdr = ' base'
966 basehdr = ' base'
967
967
968 if ui.debugflag:
968 if ui.debugflag:
969 shortfn = hex
969 shortfn = hex
970 else:
970 else:
971 shortfn = short
971 shortfn = short
972
972
973 # There might not be anything in r, so have a sane default
973 # There might not be anything in r, so have a sane default
974 idlen = 12
974 idlen = 12
975 for i in r:
975 for i in r:
976 idlen = len(shortfn(r.node(i)))
976 idlen = len(shortfn(r.node(i)))
977 break
977 break
978
978
979 if format == 0:
979 if format == 0:
980 ui.write((" rev offset length " + basehdr + " linkrev"
980 ui.write((" rev offset length " + basehdr + " linkrev"
981 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
981 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
982 elif format == 1:
982 elif format == 1:
983 ui.write((" rev flag offset length"
983 ui.write((" rev flag offset length"
984 " size " + basehdr + " link p1 p2"
984 " size " + basehdr + " link p1 p2"
985 " %s\n") % "nodeid".rjust(idlen))
985 " %s\n") % "nodeid".rjust(idlen))
986
986
987 for i in r:
987 for i in r:
988 node = r.node(i)
988 node = r.node(i)
989 if generaldelta:
989 if generaldelta:
990 base = r.deltaparent(i)
990 base = r.deltaparent(i)
991 else:
991 else:
992 base = r.chainbase(i)
992 base = r.chainbase(i)
993 if format == 0:
993 if format == 0:
994 try:
994 try:
995 pp = r.parents(node)
995 pp = r.parents(node)
996 except Exception:
996 except Exception:
997 pp = [nullid, nullid]
997 pp = [nullid, nullid]
998 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
998 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
999 i, r.start(i), r.length(i), base, r.linkrev(i),
999 i, r.start(i), r.length(i), base, r.linkrev(i),
1000 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
1000 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
1001 elif format == 1:
1001 elif format == 1:
1002 pr = r.parentrevs(i)
1002 pr = r.parentrevs(i)
1003 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
1003 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
1004 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
1004 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
1005 base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
1005 base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
1006
1006
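# Editorial example (not part of debugcommands.py): dumping the changelog
# index in format 1, which adds the flag and size columns printed above.
# Use -m for the manifest, or pass a filelog path instead of -c/-m.
#
#   $ hg debugindex -f 1 -c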
1007 @command('debugindexdot', cmdutil.debugrevlogopts,
1007 @command('debugindexdot', cmdutil.debugrevlogopts,
1008 _('-c|-m|FILE'), optionalrepo=True)
1008 _('-c|-m|FILE'), optionalrepo=True)
1009 def debugindexdot(ui, repo, file_=None, **opts):
1009 def debugindexdot(ui, repo, file_=None, **opts):
1010 """dump an index DAG as a graphviz dot file"""
1010 """dump an index DAG as a graphviz dot file"""
1011 opts = pycompat.byteskwargs(opts)
1011 opts = pycompat.byteskwargs(opts)
1012 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
1012 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
1013 ui.write(("digraph G {\n"))
1013 ui.write(("digraph G {\n"))
1014 for i in r:
1014 for i in r:
1015 node = r.node(i)
1015 node = r.node(i)
1016 pp = r.parents(node)
1016 pp = r.parents(node)
1017 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1017 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1018 if pp[1] != nullid:
1018 if pp[1] != nullid:
1019 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1019 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1020 ui.write("}\n")
1020 ui.write("}\n")
1021
1021
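# Editorial example (not part of debugcommands.py): the dot output can be
# rendered with Graphviz, assuming the "dot" tool is installed.
#
#   $ hg debugindexdot -c | dot -Tpng -o dag.png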
1022 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1022 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1023 def debuginstall(ui, **opts):
1023 def debuginstall(ui, **opts):
1024 '''test Mercurial installation
1024 '''test Mercurial installation
1025
1025
1026 Returns 0 on success.
1026 Returns 0 on success.
1027 '''
1027 '''
1028 opts = pycompat.byteskwargs(opts)
1028 opts = pycompat.byteskwargs(opts)
1029
1029
1030 def writetemp(contents):
1030 def writetemp(contents):
1031 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
1031 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
1032 f = os.fdopen(fd, pycompat.sysstr("wb"))
1032 f = os.fdopen(fd, pycompat.sysstr("wb"))
1033 f.write(contents)
1033 f.write(contents)
1034 f.close()
1034 f.close()
1035 return name
1035 return name
1036
1036
1037 problems = 0
1037 problems = 0
1038
1038
1039 fm = ui.formatter('debuginstall', opts)
1039 fm = ui.formatter('debuginstall', opts)
1040 fm.startitem()
1040 fm.startitem()
1041
1041
1042 # encoding
1042 # encoding
1043 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1043 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1044 err = None
1044 err = None
1045 try:
1045 try:
1046 codecs.lookup(pycompat.sysstr(encoding.encoding))
1046 codecs.lookup(pycompat.sysstr(encoding.encoding))
1047 except LookupError as inst:
1047 except LookupError as inst:
1048 err = util.forcebytestr(inst)
1048 err = util.forcebytestr(inst)
1049 problems += 1
1049 problems += 1
1050 fm.condwrite(err, 'encodingerror', _(" %s\n"
1050 fm.condwrite(err, 'encodingerror', _(" %s\n"
1051 " (check that your locale is properly set)\n"), err)
1051 " (check that your locale is properly set)\n"), err)
1052
1052
1053 # Python
1053 # Python
1054 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1054 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1055 pycompat.sysexecutable)
1055 pycompat.sysexecutable)
1056 fm.write('pythonver', _("checking Python version (%s)\n"),
1056 fm.write('pythonver', _("checking Python version (%s)\n"),
1057 ("%d.%d.%d" % sys.version_info[:3]))
1057 ("%d.%d.%d" % sys.version_info[:3]))
1058 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1058 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1059 os.path.dirname(pycompat.fsencode(os.__file__)))
1059 os.path.dirname(pycompat.fsencode(os.__file__)))
1060
1060
1061 security = set(sslutil.supportedprotocols)
1061 security = set(sslutil.supportedprotocols)
1062 if sslutil.hassni:
1062 if sslutil.hassni:
1063 security.add('sni')
1063 security.add('sni')
1064
1064
1065 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1065 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1066 fm.formatlist(sorted(security), name='protocol',
1066 fm.formatlist(sorted(security), name='protocol',
1067 fmt='%s', sep=','))
1067 fmt='%s', sep=','))
1068
1068
1069 # These are warnings, not errors. So don't increment problem count. This
1069 # These are warnings, not errors. So don't increment problem count. This
1070 # may change in the future.
1070 # may change in the future.
1071 if 'tls1.2' not in security:
1071 if 'tls1.2' not in security:
1072 fm.plain(_(' TLS 1.2 not supported by Python install; '
1072 fm.plain(_(' TLS 1.2 not supported by Python install; '
1073 'network connections lack modern security\n'))
1073 'network connections lack modern security\n'))
1074 if 'sni' not in security:
1074 if 'sni' not in security:
1075 fm.plain(_(' SNI not supported by Python install; may have '
1075 fm.plain(_(' SNI not supported by Python install; may have '
1076 'connectivity issues with some servers\n'))
1076 'connectivity issues with some servers\n'))
1077
1077
1078 # TODO print CA cert info
1078 # TODO print CA cert info
1079
1079
1080 # hg version
1080 # hg version
1081 hgver = util.version()
1081 hgver = util.version()
1082 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1082 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1083 hgver.split('+')[0])
1083 hgver.split('+')[0])
1084 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1084 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1085 '+'.join(hgver.split('+')[1:]))
1085 '+'.join(hgver.split('+')[1:]))
1086
1086
1087 # compiled modules
1087 # compiled modules
1088 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1088 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1089 policy.policy)
1089 policy.policy)
1090 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1090 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1091 os.path.dirname(pycompat.fsencode(__file__)))
1091 os.path.dirname(pycompat.fsencode(__file__)))
1092
1092
1093 if policy.policy in ('c', 'allow'):
1093 if policy.policy in ('c', 'allow'):
1094 err = None
1094 err = None
1095 try:
1095 try:
1096 from .cext import (
1096 from .cext import (
1097 base85,
1097 base85,
1098 bdiff,
1098 bdiff,
1099 mpatch,
1099 mpatch,
1100 osutil,
1100 osutil,
1101 )
1101 )
1102 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1102 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1103 except Exception as inst:
1103 except Exception as inst:
1104 err = util.forcebytestr(inst)
1104 err = util.forcebytestr(inst)
1105 problems += 1
1105 problems += 1
1106 fm.condwrite(err, 'extensionserror', " %s\n", err)
1106 fm.condwrite(err, 'extensionserror', " %s\n", err)
1107
1107
1108 compengines = util.compengines._engines.values()
1108 compengines = util.compengines._engines.values()
1109 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1109 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1110 fm.formatlist(sorted(e.name() for e in compengines),
1110 fm.formatlist(sorted(e.name() for e in compengines),
1111 name='compengine', fmt='%s', sep=', '))
1111 name='compengine', fmt='%s', sep=', '))
1112 fm.write('compenginesavail', _('checking available compression engines '
1112 fm.write('compenginesavail', _('checking available compression engines '
1113 '(%s)\n'),
1113 '(%s)\n'),
1114 fm.formatlist(sorted(e.name() for e in compengines
1114 fm.formatlist(sorted(e.name() for e in compengines
1115 if e.available()),
1115 if e.available()),
1116 name='compengine', fmt='%s', sep=', '))
1116 name='compengine', fmt='%s', sep=', '))
1117 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1117 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1118 fm.write('compenginesserver', _('checking available compression engines '
1118 fm.write('compenginesserver', _('checking available compression engines '
1119 'for wire protocol (%s)\n'),
1119 'for wire protocol (%s)\n'),
1120 fm.formatlist([e.name() for e in wirecompengines
1120 fm.formatlist([e.name() for e in wirecompengines
1121 if e.wireprotosupport()],
1121 if e.wireprotosupport()],
1122 name='compengine', fmt='%s', sep=', '))
1122 name='compengine', fmt='%s', sep=', '))
1123
1123
1124 # templates
1124 # templates
1125 p = templater.templatepaths()
1125 p = templater.templatepaths()
1126 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1126 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1127 fm.condwrite(not p, '', _(" no template directories found\n"))
1127 fm.condwrite(not p, '', _(" no template directories found\n"))
1128 if p:
1128 if p:
1129 m = templater.templatepath("map-cmdline.default")
1129 m = templater.templatepath("map-cmdline.default")
1130 if m:
1130 if m:
1131 # template found, check if it is working
1131 # template found, check if it is working
1132 err = None
1132 err = None
1133 try:
1133 try:
1134 templater.templater.frommapfile(m)
1134 templater.templater.frommapfile(m)
1135 except Exception as inst:
1135 except Exception as inst:
1136 err = util.forcebytestr(inst)
1136 err = util.forcebytestr(inst)
1137 p = None
1137 p = None
1138 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1138 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1139 else:
1139 else:
1140 p = None
1140 p = None
1141 fm.condwrite(p, 'defaulttemplate',
1141 fm.condwrite(p, 'defaulttemplate',
1142 _("checking default template (%s)\n"), m)
1142 _("checking default template (%s)\n"), m)
1143 fm.condwrite(not m, 'defaulttemplatenotfound',
1143 fm.condwrite(not m, 'defaulttemplatenotfound',
1144 _(" template '%s' not found\n"), "default")
1144 _(" template '%s' not found\n"), "default")
1145 if not p:
1145 if not p:
1146 problems += 1
1146 problems += 1
1147 fm.condwrite(not p, '',
1147 fm.condwrite(not p, '',
1148 _(" (templates seem to have been installed incorrectly)\n"))
1148 _(" (templates seem to have been installed incorrectly)\n"))
1149
1149
1150 # editor
1150 # editor
1151 editor = ui.geteditor()
1151 editor = ui.geteditor()
1152 editor = util.expandpath(editor)
1152 editor = util.expandpath(editor)
1153 fm.write('editor', _("checking commit editor... (%s)\n"), editor)
1153 fm.write('editor', _("checking commit editor... (%s)\n"), editor)
1154 cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
1154 cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
1155 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1155 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1156 _(" No commit editor set and can't find %s in PATH\n"
1156 _(" No commit editor set and can't find %s in PATH\n"
1157 " (specify a commit editor in your configuration"
1157 " (specify a commit editor in your configuration"
1158 " file)\n"), not cmdpath and editor == 'vi' and editor)
1158 " file)\n"), not cmdpath and editor == 'vi' and editor)
1159 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1159 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1160 _(" Can't find editor '%s' in PATH\n"
1160 _(" Can't find editor '%s' in PATH\n"
1161 " (specify a commit editor in your configuration"
1161 " (specify a commit editor in your configuration"
1162 " file)\n"), not cmdpath and editor)
1162 " file)\n"), not cmdpath and editor)
1163 if not cmdpath and editor != 'vi':
1163 if not cmdpath and editor != 'vi':
1164 problems += 1
1164 problems += 1
1165
1165
1166 # check username
1166 # check username
1167 username = None
1167 username = None
1168 err = None
1168 err = None
1169 try:
1169 try:
1170 username = ui.username()
1170 username = ui.username()
1171 except error.Abort as e:
1171 except error.Abort as e:
1172 err = util.forcebytestr(e)
1172 err = util.forcebytestr(e)
1173 problems += 1
1173 problems += 1
1174
1174
1175 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1175 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1176 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1176 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1177 " (specify a username in your configuration file)\n"), err)
1177 " (specify a username in your configuration file)\n"), err)
1178
1178
1179 fm.condwrite(not problems, '',
1179 fm.condwrite(not problems, '',
1180 _("no problems detected\n"))
1180 _("no problems detected\n"))
1181 if not problems:
1181 if not problems:
1182 fm.data(problems=problems)
1182 fm.data(problems=problems)
1183 fm.condwrite(problems, 'problems',
1183 fm.condwrite(problems, 'problems',
1184 _("%d problems detected,"
1184 _("%d problems detected,"
1185 " please check your install!\n"), problems)
1185 " please check your install!\n"), problems)
1186 fm.end()
1186 fm.end()
1187
1187
1188 return problems
1188 return problems
1189
1189
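# Editorial example (not part of debugcommands.py): the command takes the
# generic formatter options, so the check results can be emitted as JSON;
# the exit status is the number of problems found ("return problems" above).
#
#   $ hg debuginstall -T json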
1190 @command('debugknown', [], _('REPO ID...'), norepo=True)
1190 @command('debugknown', [], _('REPO ID...'), norepo=True)
1191 def debugknown(ui, repopath, *ids, **opts):
1191 def debugknown(ui, repopath, *ids, **opts):
1192 """test whether node ids are known to a repo
1192 """test whether node ids are known to a repo
1193
1193
1194 Every ID must be a full-length hex node id string. Returns a list of 0s
1194 Every ID must be a full-length hex node id string. Returns a list of 0s
1195 and 1s indicating unknown/known.
1195 and 1s indicating unknown/known.
1196 """
1196 """
1197 opts = pycompat.byteskwargs(opts)
1197 opts = pycompat.byteskwargs(opts)
1198 repo = hg.peer(ui, opts, repopath)
1198 repo = hg.peer(ui, opts, repopath)
1199 if not repo.capable('known'):
1199 if not repo.capable('known'):
1200 raise error.Abort("known() not supported by target repository")
1200 raise error.Abort("known() not supported by target repository")
1201 flags = repo.known([bin(s) for s in ids])
1201 flags = repo.known([bin(s) for s in ids])
1202 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1202 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1203
1203
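# Editorial sketch (not part of debugcommands.py): querying a peer's known()
# through the CLI from Python. The URL and node id are placeholders, and hg
# is assumed to be on PATH.
#
# import subprocess
# out = subprocess.check_output(
#     ['hg', 'debugknown', 'http://example.com/repo',
#      '0123456789abcdef0123456789abcdef01234567'])
# # out is b'0\n' or b'1\n': one character per queried node id.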
1204 @command('debuglabelcomplete', [], _('LABEL...'))
1204 @command('debuglabelcomplete', [], _('LABEL...'))
1205 def debuglabelcomplete(ui, repo, *args):
1205 def debuglabelcomplete(ui, repo, *args):
1206 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1206 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1207 debugnamecomplete(ui, repo, *args)
1207 debugnamecomplete(ui, repo, *args)
1208
1208
1209 @command('debuglocks',
1209 @command('debuglocks',
1210 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1210 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1211 ('W', 'force-wlock', None,
1211 ('W', 'force-wlock', None,
1212 _('free the working state lock (DANGEROUS)'))],
1212 _('free the working state lock (DANGEROUS)'))],
1213 _('[OPTION]...'))
1213 _('[OPTION]...'))
1214 def debuglocks(ui, repo, **opts):
1214 def debuglocks(ui, repo, **opts):
1215 """show or modify state of locks
1215 """show or modify state of locks
1216
1216
1217 By default, this command will show which locks are held. This
1217 By default, this command will show which locks are held. This
1218 includes the user and process holding the lock, the amount of time
1218 includes the user and process holding the lock, the amount of time
1219 the lock has been held, and the machine name where the process is
1219 the lock has been held, and the machine name where the process is
1220 running if it's not local.
1220 running if it's not local.
1221
1221
1222 Locks protect the integrity of Mercurial's data, so they should be
1222 Locks protect the integrity of Mercurial's data, so they should be
1223 treated with care. System crashes or other interruptions may cause
1223 treated with care. System crashes or other interruptions may cause
1224 locks to not be properly released, though Mercurial will usually
1224 locks to not be properly released, though Mercurial will usually
1225 detect and remove such stale locks automatically.
1225 detect and remove such stale locks automatically.
1226
1226
1227 However, detecting stale locks may not always be possible (for
1227 However, detecting stale locks may not always be possible (for
1228 instance, on a shared filesystem). Removing locks may also be
1228 instance, on a shared filesystem). Removing locks may also be
1229 blocked by filesystem permissions.
1229 blocked by filesystem permissions.
1230
1230
1231 Returns 0 if no locks are held.
1231 Returns 0 if no locks are held.
1232
1232
1233 """
1233 """
1234
1234
1235 if opts.get(r'force_lock'):
1235 if opts.get(r'force_lock'):
1236 repo.svfs.unlink('lock')
1236 repo.svfs.unlink('lock')
1237 if opts.get(r'force_wlock'):
1237 if opts.get(r'force_wlock'):
1238 repo.vfs.unlink('wlock')
1238 repo.vfs.unlink('wlock')
1239 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1239 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1240 return 0
1240 return 0
1241
1241
1242 now = time.time()
1242 now = time.time()
1243 held = 0
1243 held = 0
1244
1244
1245 def report(vfs, name, method):
1245 def report(vfs, name, method):
1246 # this causes stale locks to get reaped for more accurate reporting
1246 # this causes stale locks to get reaped for more accurate reporting
1247 try:
1247 try:
1248 l = method(False)
1248 l = method(False)
1249 except error.LockHeld:
1249 except error.LockHeld:
1250 l = None
1250 l = None
1251
1251
1252 if l:
1252 if l:
1253 l.release()
1253 l.release()
1254 else:
1254 else:
1255 try:
1255 try:
1256 stat = vfs.lstat(name)
1256 stat = vfs.lstat(name)
1257 age = now - stat.st_mtime
1257 age = now - stat.st_mtime
1258 user = util.username(stat.st_uid)
1258 user = util.username(stat.st_uid)
1259 locker = vfs.readlock(name)
1259 locker = vfs.readlock(name)
1260 if ":" in locker:
1260 if ":" in locker:
1261 host, pid = locker.split(':')
1261 host, pid = locker.split(':')
1262 if host == socket.gethostname():
1262 if host == socket.gethostname():
1263 locker = 'user %s, process %s' % (user, pid)
1263 locker = 'user %s, process %s' % (user, pid)
1264 else:
1264 else:
1265 locker = 'user %s, process %s, host %s' \
1265 locker = 'user %s, process %s, host %s' \
1266 % (user, pid, host)
1266 % (user, pid, host)
1267 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1267 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1268 return 1
1268 return 1
1269 except OSError as e:
1269 except OSError as e:
1270 if e.errno != errno.ENOENT:
1270 if e.errno != errno.ENOENT:
1271 raise
1271 raise
1272
1272
1273 ui.write(("%-6s free\n") % (name + ":"))
1273 ui.write(("%-6s free\n") % (name + ":"))
1274 return 0
1274 return 0
1275
1275
1276 held += report(repo.svfs, "lock", repo.lock)
1276 held += report(repo.svfs, "lock", repo.lock)
1277 held += report(repo.vfs, "wlock", repo.wlock)
1277 held += report(repo.vfs, "wlock", repo.wlock)
1278
1278
1279 return held
1279 return held
1280
1280
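# Editorial example (not part of debugcommands.py): typical output when no
# locks are held, matching the "%-6s free" format used by report() above.
#
#   $ hg debuglocks
#   lock:  free
#   wlock: free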
1281 @command('debugmergestate', [], '')
1281 @command('debugmergestate', [], '')
1282 def debugmergestate(ui, repo, *args):
1282 def debugmergestate(ui, repo, *args):
1283 """print merge state
1283 """print merge state
1284
1284
1285 Use --verbose to print out information about whether v1 or v2 merge state
1285 Use --verbose to print out information about whether v1 or v2 merge state
1286 was chosen."""
1286 was chosen."""
1287 def _hashornull(h):
1287 def _hashornull(h):
1288 if h == nullhex:
1288 if h == nullhex:
1289 return 'null'
1289 return 'null'
1290 else:
1290 else:
1291 return h
1291 return h
1292
1292
1293 def printrecords(version):
1293 def printrecords(version):
1294 ui.write(('* version %s records\n') % version)
1294 ui.write(('* version %s records\n') % version)
1295 if version == 1:
1295 if version == 1:
1296 records = v1records
1296 records = v1records
1297 else:
1297 else:
1298 records = v2records
1298 records = v2records
1299
1299
1300 for rtype, record in records:
1300 for rtype, record in records:
1301 # pretty print some record types
1301 # pretty print some record types
1302 if rtype == 'L':
1302 if rtype == 'L':
1303 ui.write(('local: %s\n') % record)
1303 ui.write(('local: %s\n') % record)
1304 elif rtype == 'O':
1304 elif rtype == 'O':
1305 ui.write(('other: %s\n') % record)
1305 ui.write(('other: %s\n') % record)
1306 elif rtype == 'm':
1306 elif rtype == 'm':
1307 driver, mdstate = record.split('\0', 1)
1307 driver, mdstate = record.split('\0', 1)
1308 ui.write(('merge driver: %s (state "%s")\n')
1308 ui.write(('merge driver: %s (state "%s")\n')
1309 % (driver, mdstate))
1309 % (driver, mdstate))
1310 elif rtype in 'FDC':
1310 elif rtype in 'FDC':
1311 r = record.split('\0')
1311 r = record.split('\0')
1312 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1312 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1313 if version == 1:
1313 if version == 1:
1314 onode = 'not stored in v1 format'
1314 onode = 'not stored in v1 format'
1315 flags = r[7]
1315 flags = r[7]
1316 else:
1316 else:
1317 onode, flags = r[7:9]
1317 onode, flags = r[7:9]
1318 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1318 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1319 % (f, rtype, state, _hashornull(hash)))
1319 % (f, rtype, state, _hashornull(hash)))
1320 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1320 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1321 ui.write((' ancestor path: %s (node %s)\n')
1321 ui.write((' ancestor path: %s (node %s)\n')
1322 % (afile, _hashornull(anode)))
1322 % (afile, _hashornull(anode)))
1323 ui.write((' other path: %s (node %s)\n')
1323 ui.write((' other path: %s (node %s)\n')
1324 % (ofile, _hashornull(onode)))
1324 % (ofile, _hashornull(onode)))
1325 elif rtype == 'f':
1325 elif rtype == 'f':
1326 filename, rawextras = record.split('\0', 1)
1326 filename, rawextras = record.split('\0', 1)
1327 extras = rawextras.split('\0')
1327 extras = rawextras.split('\0')
1328 i = 0
1328 i = 0
1329 extrastrings = []
1329 extrastrings = []
1330 while i < len(extras):
1330 while i < len(extras):
1331 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1331 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1332 i += 2
1332 i += 2
1333
1333
1334 ui.write(('file extras: %s (%s)\n')
1334 ui.write(('file extras: %s (%s)\n')
1335 % (filename, ', '.join(extrastrings)))
1335 % (filename, ', '.join(extrastrings)))
1336 elif rtype == 'l':
1336 elif rtype == 'l':
1337 labels = record.split('\0', 2)
1337 labels = record.split('\0', 2)
1338 labels = [l for l in labels if len(l) > 0]
1338 labels = [l for l in labels if len(l) > 0]
1339 ui.write(('labels:\n'))
1339 ui.write(('labels:\n'))
1340 ui.write((' local: %s\n' % labels[0]))
1340 ui.write((' local: %s\n' % labels[0]))
1341 ui.write((' other: %s\n' % labels[1]))
1341 ui.write((' other: %s\n' % labels[1]))
1342 if len(labels) > 2:
1342 if len(labels) > 2:
1343 ui.write((' base: %s\n' % labels[2]))
1343 ui.write((' base: %s\n' % labels[2]))
1344 else:
1344 else:
1345 ui.write(('unrecognized entry: %s\t%s\n')
1345 ui.write(('unrecognized entry: %s\t%s\n')
1346 % (rtype, record.replace('\0', '\t')))
1346 % (rtype, record.replace('\0', '\t')))
1347
1347
1348 # Avoid mergestate.read() since it may raise an exception for unsupported
1348 # Avoid mergestate.read() since it may raise an exception for unsupported
1349 # merge state records. We shouldn't be doing this, but this is OK since this
1349 # merge state records. We shouldn't be doing this, but this is OK since this
1350 # command is pretty low-level.
1350 # command is pretty low-level.
1351 ms = mergemod.mergestate(repo)
1351 ms = mergemod.mergestate(repo)
1352
1352
1353 # sort so that reasonable information is on top
1353 # sort so that reasonable information is on top
1354 v1records = ms._readrecordsv1()
1354 v1records = ms._readrecordsv1()
1355 v2records = ms._readrecordsv2()
1355 v2records = ms._readrecordsv2()
1356 order = 'LOml'
1356 order = 'LOml'
1357 def key(r):
1357 def key(r):
1358 idx = order.find(r[0])
1358 idx = order.find(r[0])
1359 if idx == -1:
1359 if idx == -1:
1360 return (1, r[1])
1360 return (1, r[1])
1361 else:
1361 else:
1362 return (0, idx)
1362 return (0, idx)
1363 v1records.sort(key=key)
1363 v1records.sort(key=key)
1364 v2records.sort(key=key)
1364 v2records.sort(key=key)
1365
1365
1366 if not v1records and not v2records:
1366 if not v1records and not v2records:
1367 ui.write(('no merge state found\n'))
1367 ui.write(('no merge state found\n'))
1368 elif not v2records:
1368 elif not v2records:
1369 ui.note(('no version 2 merge state\n'))
1369 ui.note(('no version 2 merge state\n'))
1370 printrecords(1)
1370 printrecords(1)
1371 elif ms._v1v2match(v1records, v2records):
1371 elif ms._v1v2match(v1records, v2records):
1372 ui.note(('v1 and v2 states match: using v2\n'))
1372 ui.note(('v1 and v2 states match: using v2\n'))
1373 printrecords(2)
1373 printrecords(2)
1374 else:
1374 else:
1375 ui.note(('v1 and v2 states mismatch: using v1\n'))
1375 ui.note(('v1 and v2 states mismatch: using v1\n'))
1376 printrecords(1)
1376 printrecords(1)
1377 if ui.verbose:
1377 if ui.verbose:
1378 printrecords(2)
1378 printrecords(2)
1379
1379
1380 @command('debugnamecomplete', [], _('NAME...'))
1380 @command('debugnamecomplete', [], _('NAME...'))
1381 def debugnamecomplete(ui, repo, *args):
1381 def debugnamecomplete(ui, repo, *args):
1382 '''complete "names" - tags, open branch names, bookmark names'''
1382 '''complete "names" - tags, open branch names, bookmark names'''
1383
1383
1384 names = set()
1384 names = set()
1385 # since we previously only listed open branches, we will handle that
1385 # since we previously only listed open branches, we will handle that
1386 # specially (after this for loop)
1386 # specially (after this for loop)
1387 for name, ns in repo.names.iteritems():
1387 for name, ns in repo.names.iteritems():
1388 if name != 'branches':
1388 if name != 'branches':
1389 names.update(ns.listnames(repo))
1389 names.update(ns.listnames(repo))
1390 names.update(tag for (tag, heads, tip, closed)
1390 names.update(tag for (tag, heads, tip, closed)
1391 in repo.branchmap().iterbranches() if not closed)
1391 in repo.branchmap().iterbranches() if not closed)
1392 completions = set()
1392 completions = set()
1393 if not args:
1393 if not args:
1394 args = ['']
1394 args = ['']
1395 for a in args:
1395 for a in args:
1396 completions.update(n for n in names if n.startswith(a))
1396 completions.update(n for n in names if n.startswith(a))
1397 ui.write('\n'.join(sorted(completions)))
1397 ui.write('\n'.join(sorted(completions)))
1398 ui.write('\n')
1398 ui.write('\n')
1399
1399
1400 @command('debugobsolete',
1400 @command('debugobsolete',
1401 [('', 'flags', 0, _('markers flag')),
1401 [('', 'flags', 0, _('markers flag')),
1402 ('', 'record-parents', False,
1402 ('', 'record-parents', False,
1403 _('record parent information for the precursor')),
1403 _('record parent information for the precursor')),
1404 ('r', 'rev', [], _('display markers relevant to REV')),
1404 ('r', 'rev', [], _('display markers relevant to REV')),
1405 ('', 'exclusive', False, _('restrict display to markers only '
1405 ('', 'exclusive', False, _('restrict display to markers only '
1406 'relevant to REV')),
1406 'relevant to REV')),
1407 ('', 'index', False, _('display index of the marker')),
1407 ('', 'index', False, _('display index of the marker')),
1408 ('', 'delete', [], _('delete markers specified by indices')),
1408 ('', 'delete', [], _('delete markers specified by indices')),
1409 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1409 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1410 _('[OBSOLETED [REPLACEMENT ...]]'))
1410 _('[OBSOLETED [REPLACEMENT ...]]'))
1411 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1411 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1412 """create arbitrary obsolete marker
1412 """create arbitrary obsolete marker
1413
1413
1414 With no arguments, displays the list of obsolescence markers."""
1414 With no arguments, displays the list of obsolescence markers."""
1415
1415
1416 opts = pycompat.byteskwargs(opts)
1416 opts = pycompat.byteskwargs(opts)
1417
1417
1418 def parsenodeid(s):
1418 def parsenodeid(s):
1419 try:
1419 try:
1420 # We do not use revsingle/revrange functions here to accept
1420 # We do not use revsingle/revrange functions here to accept
1421 # arbitrary node identifiers, possibly not present in the
1421 # arbitrary node identifiers, possibly not present in the
1422 # local repository.
1422 # local repository.
1423 n = bin(s)
1423 n = bin(s)
1424 if len(n) != len(nullid):
1424 if len(n) != len(nullid):
1425 raise TypeError()
1425 raise TypeError()
1426 return n
1426 return n
1427 except TypeError:
1427 except TypeError:
1428 raise error.Abort('changeset references must be full hexadecimal '
1428 raise error.Abort('changeset references must be full hexadecimal '
1429 'node identifiers')
1429 'node identifiers')
1430
1430
1431 if opts.get('delete'):
1431 if opts.get('delete'):
1432 indices = []
1432 indices = []
1433 for v in opts.get('delete'):
1433 for v in opts.get('delete'):
1434 try:
1434 try:
1435 indices.append(int(v))
1435 indices.append(int(v))
1436 except ValueError:
1436 except ValueError:
1437 raise error.Abort(_('invalid index value: %r') % v,
1437 raise error.Abort(_('invalid index value: %r') % v,
1438 hint=_('use integers for indices'))
1438 hint=_('use integers for indices'))
1439
1439
1440 if repo.currenttransaction():
1440 if repo.currenttransaction():
1441 raise error.Abort(_('cannot delete obsmarkers in the middle '
1441 raise error.Abort(_('cannot delete obsmarkers in the middle '
1442 'of a transaction.'))
1442 'of a transaction.'))
1443
1443
1444 with repo.lock():
1444 with repo.lock():
1445 n = repair.deleteobsmarkers(repo.obsstore, indices)
1445 n = repair.deleteobsmarkers(repo.obsstore, indices)
1446 ui.write(_('deleted %i obsolescence markers\n') % n)
1446 ui.write(_('deleted %i obsolescence markers\n') % n)
1447
1447
1448 return
1448 return
1449
1449
1450 if precursor is not None:
1450 if precursor is not None:
1451 if opts['rev']:
1451 if opts['rev']:
1452 raise error.Abort('cannot select revision when creating marker')
1452 raise error.Abort('cannot select revision when creating marker')
1453 metadata = {}
1453 metadata = {}
1454 metadata['user'] = opts['user'] or ui.username()
1454 metadata['user'] = opts['user'] or ui.username()
1455 succs = tuple(parsenodeid(succ) for succ in successors)
1455 succs = tuple(parsenodeid(succ) for succ in successors)
1456 l = repo.lock()
1456 l = repo.lock()
1457 try:
1457 try:
1458 tr = repo.transaction('debugobsolete')
1458 tr = repo.transaction('debugobsolete')
1459 try:
1459 try:
1460 date = opts.get('date')
1460 date = opts.get('date')
1461 if date:
1461 if date:
1462 date = util.parsedate(date)
1462 date = util.parsedate(date)
1463 else:
1463 else:
1464 date = None
1464 date = None
1465 prec = parsenodeid(precursor)
1465 prec = parsenodeid(precursor)
1466 parents = None
1466 parents = None
1467 if opts['record_parents']:
1467 if opts['record_parents']:
1468 if prec not in repo.unfiltered():
1468 if prec not in repo.unfiltered():
1469 raise error.Abort('cannot use --record-parents on '
1469 raise error.Abort('cannot use --record-parents on '
1470 'unknown changesets')
1470 'unknown changesets')
1471 parents = repo.unfiltered()[prec].parents()
1471 parents = repo.unfiltered()[prec].parents()
1472 parents = tuple(p.node() for p in parents)
1472 parents = tuple(p.node() for p in parents)
1473 repo.obsstore.create(tr, prec, succs, opts['flags'],
1473 repo.obsstore.create(tr, prec, succs, opts['flags'],
1474 parents=parents, date=date,
1474 parents=parents, date=date,
1475 metadata=metadata, ui=ui)
1475 metadata=metadata, ui=ui)
1476 tr.close()
1476 tr.close()
1477 except ValueError as exc:
1477 except ValueError as exc:
1478 raise error.Abort(_('bad obsmarker input: %s') % exc)
1478 raise error.Abort(_('bad obsmarker input: %s') % exc)
1479 finally:
1479 finally:
1480 tr.release()
1480 tr.release()
1481 finally:
1481 finally:
1482 l.release()
1482 l.release()
1483 else:
1483 else:
1484 if opts['rev']:
1484 if opts['rev']:
1485 revs = scmutil.revrange(repo, opts['rev'])
1485 revs = scmutil.revrange(repo, opts['rev'])
1486 nodes = [repo[r].node() for r in revs]
1486 nodes = [repo[r].node() for r in revs]
1487 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1487 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1488 exclusive=opts['exclusive']))
1488 exclusive=opts['exclusive']))
1489 markers.sort(key=lambda x: x._data)
1489 markers.sort(key=lambda x: x._data)
1490 else:
1490 else:
1491 markers = obsutil.getmarkers(repo)
1491 markers = obsutil.getmarkers(repo)
1492
1492
1493 markerstoiter = markers
1493 markerstoiter = markers
1494 isrelevant = lambda m: True
1494 isrelevant = lambda m: True
1495 if opts.get('rev') and opts.get('index'):
1495 if opts.get('rev') and opts.get('index'):
1496 markerstoiter = obsutil.getmarkers(repo)
1496 markerstoiter = obsutil.getmarkers(repo)
1497 markerset = set(markers)
1497 markerset = set(markers)
1498 isrelevant = lambda m: m in markerset
1498 isrelevant = lambda m: m in markerset
1499
1499
1500 fm = ui.formatter('debugobsolete', opts)
1500 fm = ui.formatter('debugobsolete', opts)
1501 for i, m in enumerate(markerstoiter):
1501 for i, m in enumerate(markerstoiter):
1502 if not isrelevant(m):
1502 if not isrelevant(m):
1503 # marker can be irrelevant when we're iterating over a set
1503 # marker can be irrelevant when we're iterating over a set
1504 # of markers (markerstoiter) which is bigger than the set
1504 # of markers (markerstoiter) which is bigger than the set
1505 # of markers we want to display (markers)
1505 # of markers we want to display (markers)
1506 # this can happen if both --index and --rev options are
1506 # this can happen if both --index and --rev options are
1507 # provided and thus we need to iterate over all of the markers
1507 # provided and thus we need to iterate over all of the markers
1508 # to get the correct indices, but only display the ones that
1508 # to get the correct indices, but only display the ones that
1509 # are relevant to --rev value
1509 # are relevant to --rev value
1510 continue
1510 continue
1511 fm.startitem()
1511 fm.startitem()
1512 ind = i if opts.get('index') else None
1512 ind = i if opts.get('index') else None
1513 cmdutil.showmarker(fm, m, index=ind)
1513 cmdutil.showmarker(fm, m, index=ind)
1514 fm.end()
1514 fm.end()
1515
1515
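# Editorial example (not part of debugcommands.py): recording that one
# changeset was rewritten into another, then listing markers with indices.
# The node ids are placeholders and must be full 40-character hashes.
#
#   $ hg debugobsolete aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa \
#         bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb -u test
#   $ hg debugobsolete --index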
1516 @command('debugpathcomplete',
1516 @command('debugpathcomplete',
1517 [('f', 'full', None, _('complete an entire path')),
1517 [('f', 'full', None, _('complete an entire path')),
1518 ('n', 'normal', None, _('show only normal files')),
1518 ('n', 'normal', None, _('show only normal files')),
1519 ('a', 'added', None, _('show only added files')),
1519 ('a', 'added', None, _('show only added files')),
1520 ('r', 'removed', None, _('show only removed files'))],
1520 ('r', 'removed', None, _('show only removed files'))],
1521 _('FILESPEC...'))
1521 _('FILESPEC...'))
1522 def debugpathcomplete(ui, repo, *specs, **opts):
1522 def debugpathcomplete(ui, repo, *specs, **opts):
1523 '''complete part or all of a tracked path
1523 '''complete part or all of a tracked path
1524
1524
1525 This command supports shells that offer path name completion. It
1525 This command supports shells that offer path name completion. It
1526 currently completes only files already known to the dirstate.
1526 currently completes only files already known to the dirstate.
1527
1527
1528 Completion extends only to the next path segment unless
1528 Completion extends only to the next path segment unless
1529 --full is specified, in which case entire paths are used.'''
1529 --full is specified, in which case entire paths are used.'''
1530
1530
1531 def complete(path, acceptable):
1531 def complete(path, acceptable):
1532 dirstate = repo.dirstate
1532 dirstate = repo.dirstate
1533 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1533 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1534 rootdir = repo.root + pycompat.ossep
1534 rootdir = repo.root + pycompat.ossep
1535 if spec != repo.root and not spec.startswith(rootdir):
1535 if spec != repo.root and not spec.startswith(rootdir):
1536 return [], []
1536 return [], []
1537 if os.path.isdir(spec):
1537 if os.path.isdir(spec):
1538 spec += '/'
1538 spec += '/'
1539 spec = spec[len(rootdir):]
1539 spec = spec[len(rootdir):]
1540 fixpaths = pycompat.ossep != '/'
1540 fixpaths = pycompat.ossep != '/'
1541 if fixpaths:
1541 if fixpaths:
1542 spec = spec.replace(pycompat.ossep, '/')
1542 spec = spec.replace(pycompat.ossep, '/')
1543 speclen = len(spec)
1543 speclen = len(spec)
1544 fullpaths = opts[r'full']
1544 fullpaths = opts[r'full']
1545 files, dirs = set(), set()
1545 files, dirs = set(), set()
1546 adddir, addfile = dirs.add, files.add
1546 adddir, addfile = dirs.add, files.add
1547 for f, st in dirstate.iteritems():
1547 for f, st in dirstate.iteritems():
1548 if f.startswith(spec) and st[0] in acceptable:
1548 if f.startswith(spec) and st[0] in acceptable:
1549 if fixpaths:
1549 if fixpaths:
1550 f = f.replace('/', pycompat.ossep)
1550 f = f.replace('/', pycompat.ossep)
1551 if fullpaths:
1551 if fullpaths:
1552 addfile(f)
1552 addfile(f)
1553 continue
1553 continue
1554 s = f.find(pycompat.ossep, speclen)
1554 s = f.find(pycompat.ossep, speclen)
1555 if s >= 0:
1555 if s >= 0:
1556 adddir(f[:s])
1556 adddir(f[:s])
1557 else:
1557 else:
1558 addfile(f)
1558 addfile(f)
1559 return files, dirs
1559 return files, dirs
1560
1560
1561 acceptable = ''
1561 acceptable = ''
1562 if opts[r'normal']:
1562 if opts[r'normal']:
1563 acceptable += 'nm'
1563 acceptable += 'nm'
1564 if opts[r'added']:
1564 if opts[r'added']:
1565 acceptable += 'a'
1565 acceptable += 'a'
1566 if opts[r'removed']:
1566 if opts[r'removed']:
1567 acceptable += 'r'
1567 acceptable += 'r'
1568 cwd = repo.getcwd()
1568 cwd = repo.getcwd()
1569 if not specs:
1569 if not specs:
1570 specs = ['.']
1570 specs = ['.']
1571
1571
1572 files, dirs = set(), set()
1572 files, dirs = set(), set()
1573 for spec in specs:
1573 for spec in specs:
1574 f, d = complete(spec, acceptable or 'nmar')
1574 f, d = complete(spec, acceptable or 'nmar')
1575 files.update(f)
1575 files.update(f)
1576 dirs.update(d)
1576 dirs.update(d)
1577 files.update(dirs)
1577 files.update(dirs)
1578 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1578 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1579 ui.write('\n')
1579 ui.write('\n')
1580
1580
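# Editorial example (not part of debugcommands.py): completing a partial
# tracked path; output stops at the next path segment unless -f/--full is
# given. The prefix below is hypothetical.
#
#   $ hg debugpathcomplete mercurial/de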
1581 @command('debugpickmergetool',
1581 @command('debugpickmergetool',
1582 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1582 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1583 ('', 'changedelete', None, _('emulate merging change and delete')),
1583 ('', 'changedelete', None, _('emulate merging change and delete')),
1584 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1584 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1585 _('[PATTERN]...'),
1585 _('[PATTERN]...'),
1586 inferrepo=True)
1586 inferrepo=True)
1587 def debugpickmergetool(ui, repo, *pats, **opts):
1587 def debugpickmergetool(ui, repo, *pats, **opts):
1588 """examine which merge tool is chosen for specified file
1588 """examine which merge tool is chosen for specified file
1589
1589
1590 As described in :hg:`help merge-tools`, Mercurial examines
1590 As described in :hg:`help merge-tools`, Mercurial examines
1591 configurations below in this order to decide which merge tool is
1591 configurations below in this order to decide which merge tool is
1592 chosen for the specified file.
1592 chosen for the specified file.
1593
1593
1594 1. ``--tool`` option
1594 1. ``--tool`` option
1595 2. ``HGMERGE`` environment variable
1595 2. ``HGMERGE`` environment variable
1596 3. configurations in ``merge-patterns`` section
1596 3. configurations in ``merge-patterns`` section
1597 4. configuration of ``ui.merge``
1597 4. configuration of ``ui.merge``
1598 5. configurations in ``merge-tools`` section
1598 5. configurations in ``merge-tools`` section
1599 6. ``hgmerge`` tool (for historical reasons only)
1599 6. ``hgmerge`` tool (for historical reasons only)
1600 7. default tool for fallback (``:merge`` or ``:prompt``)
1600 7. default tool for fallback (``:merge`` or ``:prompt``)
1601
1601
1602 This command writes out examination result in the style below::
1602 This command writes out examination result in the style below::
1603
1603
1604 FILE = MERGETOOL
1604 FILE = MERGETOOL
1605
1605
1606 By default, all files known in the first parent context of the
1606 By default, all files known in the first parent context of the
1607 working directory are examined. Use file patterns and/or -I/-X
1607 working directory are examined. Use file patterns and/or -I/-X
1608 options to limit target files. -r/--rev is also useful to examine
1608 options to limit target files. -r/--rev is also useful to examine
1609 files in another context without actually updating to it.
1609 files in another context without actually updating to it.
1610
1610
1611 With --debug, this command shows warning messages while matching
1611 With --debug, this command shows warning messages while matching
1612 against ``merge-patterns`` and so on, too. It is recommended to
1612 against ``merge-patterns`` and so on, too. It is recommended to
1613 use this option with explicit file patterns and/or -I/-X options,
1613 use this option with explicit file patterns and/or -I/-X options,
1614 because this option increases the amount of output per file according
1614 because this option increases the amount of output per file according
1615 to the configuration in hgrc.
1615 to the configuration in hgrc.
1616
1616
1617 With -v/--verbose, this command shows configurations below at
1617 With -v/--verbose, this command shows configurations below at
1618 first (only if specified).
1618 first (only if specified).
1619
1619
1620 - ``--tool`` option
1620 - ``--tool`` option
1621 - ``HGMERGE`` environment variable
1621 - ``HGMERGE`` environment variable
1622 - configuration of ``ui.merge``
1622 - configuration of ``ui.merge``
1623
1623
1624 If merge tool is chosen before matching against
1624 If merge tool is chosen before matching against
1625 ``merge-patterns``, this command can't show any helpful
1625 ``merge-patterns``, this command can't show any helpful
1626 information, even with --debug. In such case, information above is
1626 information, even with --debug. In such case, information above is
1627 useful to know why a merge tool is chosen.
1627 useful to know why a merge tool is chosen.
1628 """
1628 """
1629 opts = pycompat.byteskwargs(opts)
1629 opts = pycompat.byteskwargs(opts)
1630 overrides = {}
1630 overrides = {}
1631 if opts['tool']:
1631 if opts['tool']:
1632 overrides[('ui', 'forcemerge')] = opts['tool']
1632 overrides[('ui', 'forcemerge')] = opts['tool']
1633 ui.note(('with --tool %r\n') % (opts['tool']))
1633 ui.note(('with --tool %r\n') % (opts['tool']))
1634
1634
1635 with ui.configoverride(overrides, 'debugmergepatterns'):
1635 with ui.configoverride(overrides, 'debugmergepatterns'):
1636 hgmerge = encoding.environ.get("HGMERGE")
1636 hgmerge = encoding.environ.get("HGMERGE")
1637 if hgmerge is not None:
1637 if hgmerge is not None:
1638 ui.note(('with HGMERGE=%r\n') % (hgmerge))
1638 ui.note(('with HGMERGE=%r\n') % (hgmerge))
1639 uimerge = ui.config("ui", "merge")
1639 uimerge = ui.config("ui", "merge")
1640 if uimerge:
1640 if uimerge:
1641 ui.note(('with ui.merge=%r\n') % (uimerge))
1641 ui.note(('with ui.merge=%r\n') % (uimerge))
1642
1642
1643 ctx = scmutil.revsingle(repo, opts.get('rev'))
1643 ctx = scmutil.revsingle(repo, opts.get('rev'))
1644 m = scmutil.match(ctx, pats, opts)
1644 m = scmutil.match(ctx, pats, opts)
1645 changedelete = opts['changedelete']
1645 changedelete = opts['changedelete']
1646 for path in ctx.walk(m):
1646 for path in ctx.walk(m):
1647 fctx = ctx[path]
1647 fctx = ctx[path]
1648 try:
1648 try:
1649 if not ui.debugflag:
1649 if not ui.debugflag:
1650 ui.pushbuffer(error=True)
1650 ui.pushbuffer(error=True)
1651 tool, toolpath = filemerge._picktool(repo, ui, path,
1651 tool, toolpath = filemerge._picktool(repo, ui, path,
1652 fctx.isbinary(),
1652 fctx.isbinary(),
1653 'l' in fctx.flags(),
1653 'l' in fctx.flags(),
1654 changedelete)
1654 changedelete)
1655 finally:
1655 finally:
1656 if not ui.debugflag:
1656 if not ui.debugflag:
1657 ui.popbuffer()
1657 ui.popbuffer()
1658 ui.write(('%s = %s\n') % (path, tool))
1658 ui.write(('%s = %s\n') % (path, tool))
1659
1659
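# Editorial example (not part of debugcommands.py): showing which tool would
# be picked for files matching a pattern, in the "FILE = MERGETOOL" style
# described in the docstring. Pattern and output are hypothetical.
#
#   $ hg debugpickmergetool --tool :merge3 'glob:**.c'
#   foo.c = :merge3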
1660 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1660 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1661 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1661 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1662 '''access the pushkey key/value protocol
1662 '''access the pushkey key/value protocol
1663
1663
1664 With two args, list the keys in the given namespace.
1664 With two args, list the keys in the given namespace.
1665
1665
1666 With five args, set a key to new if it currently is set to old.
1666 With five args, set a key to new if it currently is set to old.
1667 Reports success or failure.
1667 Reports success or failure.
1668 '''
1668 '''
1669
1669
1670 target = hg.peer(ui, {}, repopath)
1670 target = hg.peer(ui, {}, repopath)
1671 if keyinfo:
1671 if keyinfo:
1672 key, old, new = keyinfo
1672 key, old, new = keyinfo
1673 r = target.pushkey(namespace, key, old, new)
1673 r = target.pushkey(namespace, key, old, new)
1674 ui.status(str(r) + '\n')
1674 ui.status(str(r) + '\n')
1675 return not r
1675 return not r
1676 else:
1676 else:
1677 for k, v in sorted(target.listkeys(namespace).iteritems()):
1677 for k, v in sorted(target.listkeys(namespace).iteritems()):
1678 ui.write("%s\t%s\n" % (util.escapestr(k),
1678 ui.write("%s\t%s\n" % (util.escapestr(k),
1679 util.escapestr(v)))
1679 util.escapestr(v)))
1680
1680
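# Editorial example (not part of debugcommands.py): listing the keys in a
# pushkey namespace on a peer; "bookmarks" and "phases" are common
# namespaces. The URL is a placeholder.
#
#   $ hg debugpushkey http://example.com/repo bookmarks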
1681 @command('debugpvec', [], _('A B'))
1681 @command('debugpvec', [], _('A B'))
1682 def debugpvec(ui, repo, a, b=None):
1682 def debugpvec(ui, repo, a, b=None):
1683 ca = scmutil.revsingle(repo, a)
1683 ca = scmutil.revsingle(repo, a)
1684 cb = scmutil.revsingle(repo, b)
1684 cb = scmutil.revsingle(repo, b)
1685 pa = pvec.ctxpvec(ca)
1685 pa = pvec.ctxpvec(ca)
1686 pb = pvec.ctxpvec(cb)
1686 pb = pvec.ctxpvec(cb)
1687 if pa == pb:
1687 if pa == pb:
1688 rel = "="
1688 rel = "="
1689 elif pa > pb:
1689 elif pa > pb:
1690 rel = ">"
1690 rel = ">"
1691 elif pa < pb:
1691 elif pa < pb:
1692 rel = "<"
1692 rel = "<"
1693 elif pa | pb:
1693 elif pa | pb:
1694 rel = "|"
1694 rel = "|"
1695 ui.write(_("a: %s\n") % pa)
1695 ui.write(_("a: %s\n") % pa)
1696 ui.write(_("b: %s\n") % pb)
1696 ui.write(_("b: %s\n") % pb)
1697 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1697 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1698 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1698 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1699 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1699 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1700 pa.distance(pb), rel))
1700 pa.distance(pb), rel))
1701
1701
1702 @command('debugrebuilddirstate|debugrebuildstate',
1702 @command('debugrebuilddirstate|debugrebuildstate',
1703 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1703 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1704 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1704 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1705 'the working copy parent')),
1705 'the working copy parent')),
1706 ],
1706 ],
1707 _('[-r REV]'))
1707 _('[-r REV]'))
1708 def debugrebuilddirstate(ui, repo, rev, **opts):
1708 def debugrebuilddirstate(ui, repo, rev, **opts):
1709 """rebuild the dirstate as it would look like for the given revision
1709 """rebuild the dirstate as it would look like for the given revision
1710
1710
1711 If no revision is specified, the first current parent will be used.
1711 If no revision is specified, the first current parent will be used.
1712
1712
1713 The dirstate will be set to the files of the given revision.
1713 The dirstate will be set to the files of the given revision.
1714 The actual working directory content or existing dirstate
1714 The actual working directory content or existing dirstate
1715 information such as adds or removes is not considered.
1715 information such as adds or removes is not considered.
1716
1716
1717 ``minimal`` will only rebuild the dirstate status for files that claim to be
1717 ``minimal`` will only rebuild the dirstate status for files that claim to be
1718 tracked but are not in the parent manifest, or that exist in the parent
1718 tracked but are not in the parent manifest, or that exist in the parent
1719 manifest but are not in the dirstate. It will not change adds, removes, or
1719 manifest but are not in the dirstate. It will not change adds, removes, or
1720 modified files that are in the working copy parent.
1720 modified files that are in the working copy parent.
1721
1721
1722 One use of this command is to make the next :hg:`status` invocation
1722 One use of this command is to make the next :hg:`status` invocation
1723 check the actual file content.
1723 check the actual file content.
1724 """
1724 """
1725 ctx = scmutil.revsingle(repo, rev)
1725 ctx = scmutil.revsingle(repo, rev)
1726 with repo.wlock():
1726 with repo.wlock():
1727 dirstate = repo.dirstate
1727 dirstate = repo.dirstate
1728 changedfiles = None
1728 changedfiles = None
1729 # See command doc for what minimal does.
1729 # See command doc for what minimal does.
1730 if opts.get(r'minimal'):
1730 if opts.get(r'minimal'):
1731 manifestfiles = set(ctx.manifest().keys())
1731 manifestfiles = set(ctx.manifest().keys())
1732 dirstatefiles = set(dirstate)
1732 dirstatefiles = set(dirstate)
1733 manifestonly = manifestfiles - dirstatefiles
1733 manifestonly = manifestfiles - dirstatefiles
1734 dsonly = dirstatefiles - manifestfiles
1734 dsonly = dirstatefiles - manifestfiles
1735 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1735 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1736 changedfiles = manifestonly | dsnotadded
1736 changedfiles = manifestonly | dsnotadded
1737
1737
1738 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1738 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1739
1739
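# Aside (hypothetical file names): the set arithmetic behind --minimal above.
# Only files the manifest and the dirstate disagree about are rebuilt, and
# files merely marked as added ('a') are left alone.
manifestfiles = {'a.txt', 'b.txt', 'c.txt'}
dirstatefiles = {'b.txt', 'c.txt', 'new.txt', 'added.txt'}
added = {'added.txt'}

manifestonly = manifestfiles - dirstatefiles          # {'a.txt'}
dsnotadded = (dirstatefiles - manifestfiles) - added  # {'new.txt'}
changedfiles = manifestonly | dsnotadded              # {'a.txt', 'new.txt'}
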
@command('debugrebuildfncache', [], '')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    repair.rebuildfncache(ui, repo)

@command('debugrename',
    [('r', 'rev', '', _('revision to debug'), _('REV'))],
    _('[-r REV] FILE'))
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'))
    m = scmutil.match(ctx, (file1,) + pats, opts)
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        o = fctx.filelog().renamed(fctx.filenode())
        rel = m.rel(abs)
        if o:
            ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
        else:
            ui.write(_("%s not renamed\n") % rel)

@command('debugrevlog', cmdutil.debugrevlogopts +
    [('d', 'dump', False, _('dump index data'))],
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)

    if opts.get("dump"):
        numrevs = len(r)
        ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
                  " rawsize totalsize compression heads chainlen\n"))
        ts = 0
        heads = set()

        for rev in xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                     "%11d %5d %8d\n" %
                     (rev, p1, p2, r.start(rev), r.end(rev),
                      r.start(dbase), r.start(cbase),
                      r.start(p1), r.start(p2),
                      rs, ts, compression, len(heads), clen))
        return 0

    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append('inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append('generaldelta')
    if not flags:
        flags = ['(none)']

    nummerges = 0
    numfull = 0
    numprev = 0
    nump1 = 0
    nump2 = 0
    numother = 0
    nump1prev = 0
    nump2prev = 0
    chainlengths = []
    chainbases = []
    chainspans = []

    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            numfull += 1
            addsize(size, fullsize)
        else:
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            addsize(size, deltasize)
            if delta == rev - 1:
                numprev += 1
                if delta == p1:
                    nump1prev += 1
                elif delta == p2:
                    nump2prev += 1
            elif delta == p1:
                nump1 += 1
            elif delta == p2:
                nump2 += 1
            elif delta != nullrev:
                numother += 1

        # Obtain data on the raw chunks in the revlog.
        segment = r._getsegmentforrevs(rev, rev)[1]
        if segment:
            chunktype = bytes(segment[0:1])
        else:
            chunktype = 'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    fullsize[2] /= numfull
    deltatotal = deltasize[2]
    if numrevs - numfull > 0:
        deltasize[2] /= numrevs - numfull
    totalsize = fulltotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    basedfmtstr = '%%%dd\n'
    basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        return basedfmtstr % len(str(max))
    def pcfmtstr(max, padding=0):
        return basepcfmtstr % (len(str(max)), ' ' * padding)

    def pcfmt(value, total):
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.write(('format : %d\n') % format)
    ui.write(('flags : %s\n') % ', '.join(flags))

    ui.write('\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
    ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
    ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
    ui.write(('revision size : ') + fmt2 % totalsize)
    ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
    ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        if chunktype == 'empty':
            return ' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return ' 0x%s : ' % hex(chunktype)

    ui.write('\n')
    ui.write(('chunks : ') + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.write(('chunks size : ') + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write('\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.write(('avg chain length : ') + fmt % avgchainlen)
    ui.write(('max chain length : ') + fmt % maxchainlen)
    ui.write(('max chain reach : ') + fmt % maxchainspan)
    ui.write(('compression ratio : ') + fmt % compratio)

    if format > 0:
        ui.write('\n')
        ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
                 % tuple(datasize))
        ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
                 % tuple(fullsize))
        ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
                 % tuple(deltasize))

    if numdeltas > 0:
        ui.write('\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
        if numprev > 0:
            ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
                                                            numprev))
            ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
                                                            numprev))
            ui.write((' other : ') + fmt2 % pcfmt(numoprev,
                                                  numprev))
        if gdelta:
            ui.write(('deltas against p1 : ')
                     + fmt % pcfmt(nump1, numdeltas))
            ui.write(('deltas against p2 : ')
                     + fmt % pcfmt(nump2, numdeltas))
            ui.write(('deltas against other : ') + fmt % pcfmt(numother,
                                                               numdeltas))

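# Aside (toy values, not repository data): how dfmtstr/pcfmtstr above build
# their format strings. The column width is derived from the widest value so
# the statistics line up.
total = 10907
fmt = '%%%dd\n' % len(str(total))                    # -> '%5d\n'
pcfmtexample = '%%%dd (%%5.2f%%%%)\n' % len(str(total))
print(fmt % 42)
print(pcfmtexample % (1234, 100.0 * 1234 / total))
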
@command('debugrevspec',
    [('', 'optimize', None,
      _('print parsed tree after optimizing (DEPRECATED)')),
     ('', 'show-revs', True, _('print list of result revisions (default)')),
     ('s', 'show-set', None, _('print internal representation of result set')),
     ('p', 'show-stage', [],
      _('print parsed tree at the given stage'), _('NAME')),
     ('', 'no-optimized', False, _('evaluate tree without optimization')),
     ('', 'verify-optimized', False, _('verify optimized result')),
    ],
    ('REVSPEC'))
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print the tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the
    unoptimized one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems('revsetalias')
    stages = [
        ('parsed', lambda tree: tree),
        ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
                                                           ui.warn)),
        ('concatenated', revsetlang.foldconcat),
        ('analyzed', revsetlang.analyze),
        ('optimized', revsetlang.optimize),
    ]
    if opts['no_optimized']:
        stages = stages[:-1]
    if opts['verify_optimized'] and opts['no_optimized']:
        raise error.Abort(_('cannot use --verify-optimized with '
                            '--no-optimized'))
    stagenames = set(n for n, f in stages)

    showalways = set()
    showchanged = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
        showchanged.update(['expanded', 'concatenated'])
    if opts['optimize']:
        showalways.add('optimized')
    if opts['show_stage'] and opts['optimize']:
        raise error.Abort(_('cannot use --optimize with --show-stage'))
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=repo.__contains__)
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(revsetlang.prettyformat(tree), "\n")
            printedtree = tree

    if opts['verify_optimized']:
        arevs = revset.makematcher(treebystage['analyzed'])(repo)
        brevs = revset.makematcher(treebystage['optimized'])(repo)
        if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
            ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
            ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        ui.write(('--- analyzed\n'), label='diff.file_a')
        ui.write(('+++ optimized\n'), label='diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write('-%s\n' % c, label='diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write('+%s\n' % c, label='diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(' %s\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
        ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
    if not opts['show_revs']:
        return
    for c in revs:
        ui.write("%s\n" % c)

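# Aside (toy revision lists): the difflib.SequenceMatcher pattern used by
# --verify-optimized above to show where two result sets diverge.
import difflib

arevs = [0, 1, 2, 4]
brevs = [0, 2, 3, 4]
sm = difflib.SequenceMatcher(None, arevs, brevs)
for tag, alo, ahi, blo, bhi in sm.get_opcodes():
    if tag in ('delete', 'replace'):
        for c in arevs[alo:ahi]:
            print('-%s' % c)
    if tag in ('insert', 'replace'):
        for c in brevs[blo:bhi]:
            print('+%s' % c)
    if tag == 'equal':
        for c in arevs[alo:ahi]:
            print(' %s' % c)
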
@command('debugsetparents', [], _('REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """

    r1 = scmutil.revsingle(repo, rev1).node()
    r2 = scmutil.revsingle(repo, rev2, 'null').node()

    with repo.wlock():
        repo.setparents(r1, r2)

@command('debugssl', [], '[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    '''test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    '''
    if not pycompat.iswindows:
        raise error.Abort(_('certificate chain building is only possible on '
                            'Windows'))

    if not source:
        if not repo:
            raise error.Abort(_("there is no Mercurial repository here, and no "
                                "server specified"))
        source = "default"

    source, branches = hg.parseurl(ui.expandpath(source))
    url = util.url(source)
    addr = None

    if url.scheme == 'https':
        addr = (url.host, url.port or 443)
    elif url.scheme == 'ssh':
        addr = (url.host, url.port or 22)
    else:
        raise error.Abort(_("only https and ssh connections are supported"))

    from . import win32

    s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
                        cert_reqs=ssl.CERT_NONE, ca_certs=None)

    try:
        s.connect(addr)
        cert = s.getpeercert(True)

        ui.status(_('checking the certificate chain for %s\n') % url.host)

        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_('certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_('failed.\n'))
            else:
                ui.status(_('done.\n'))
        else:
            ui.status(_('full certificate chain is available\n'))
    finally:
        s.close()

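# Aside (example host/port, not Mercurial code): the connect-and-fetch-the-
# peer-certificate step above, expressed with the newer ssl.SSLContext API
# instead of the deprecated ssl.wrap_socket call.
import socket
import ssl

def fetch_peer_cert_example(host='example.com', port=443):
    ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    ctx.check_hostname = False
    ctx.verify_mode = ssl.CERT_NONE      # we only want the raw certificate
    with ctx.wrap_socket(socket.create_connection((host, port)),
                         server_hostname=host) as s:
        return s.getpeercert(True)       # DER bytes, as checked by win32 above
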
@command('debugsub',
    [('r', 'rev', '',
      _('revision to check'), _('REV'))],
    _('[-r REV] [REV]'))
def debugsub(ui, repo, rev=None):
    ctx = scmutil.revsingle(repo, rev, None)
    for k, v in sorted(ctx.substate.items()):
        ui.write(('path %s\n') % k)
        ui.write((' source %s\n') % v[0])
        ui.write((' revision %s\n') % v[1])

@command('debugsuccessorssets',
    [('', 'closest', False, _('return closest successors sets only'))],
    _('[REV]'))
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless the closest
    successors sets option is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors is called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    ctx2str = str
    node2str = short
    if ui.debug():
        def ctx2str(ctx):
            return ctx.hex()
        node2str = hex
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write('%s\n' % ctx2str(ctx))
        for succsset in obsutil.successorssets(repo, ctx.node(),
                                               closest=opts['closest'],
                                               cache=cache):
            if succsset:
                ui.write(' ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(' ')
                    ui.write(node2str(node))
            ui.write('\n')

@command('debugtemplate',
    [('r', 'rev', [], _('apply template on changesets'), _('REV')),
     ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
    _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts[r'rev']:
        if repo is None:
            raise error.RepoError(_('there is no Mercurial repository here '
                                    '(.hg not found)'))
        revs = scmutil.revrange(repo, opts[r'rev'])

    props = {}
    for d in opts[r'define']:
        try:
            k, v = (e.strip() for e in d.split('=', 1))
            if not k or k == 'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_('malformed keyword definition: %s') % d)

    if ui.verbose:
        aliases = ui.configitems('templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), '\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')

    if revs is None:
        t = formatter.maketemplater(ui, tmpl)
        props['ui'] = ui
        ui.write(t.render(props))
    else:
        displayer = cmdutil.makelogtemplater(ui, repo, tmpl)
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()

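# Aside (hypothetical inputs): the -D KEY=VALUE parsing above in isolation.
# Only the first '=' splits, an empty key or the reserved name 'ui' is
# rejected, and anything unparsable is reported as malformed.
def parse_define_example(d):
    try:
        k, v = (e.strip() for e in d.split('=', 1))
        if not k or k == 'ui':
            raise ValueError
        return k, v
    except ValueError:
        raise SystemExit('malformed keyword definition: %s' % d)

# parse_define_example('who=world') -> ('who', 'world')
# parse_define_example('greeting')  -> error: malformed keyword definition
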
@command('debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    with repo.wlock(), repo.lock():
        repo.updatecaches()

@command('debugupgraderepo', [
    ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
    ('', 'run', False, _('performs an upgrade')),
])
def debugupgraderepo(ui, repo, run=False, optimize=None):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.
    """
    return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)

@command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
    inferrepo=True)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    ui.write(('matcher: %r\n' % m))
    items = list(repo[None].walk(m))
    if not items:
        return
    f = lambda fn: fn
    if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
        f = lambda fn: util.normpath(fn)
    fmt = 'f %%-%ds %%-%ds %%s' % (
        max([len(abs) for abs in items]),
        max([len(m.rel(abs)) for abs in items]))
    for abs in items:
        line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
        ui.write("%s\n" % line.rstrip())

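# Aside (made-up file names): the dynamic column-width trick used above.
# The width specifiers of the format string are computed from the longest
# entries so the columns stay aligned.
names = ['a.txt', 'src/deep/module.py', 'README']
rels = ['a.txt', 'module.py', 'README']
fmt = 'f %%-%ds %%-%ds %%s' % (max(len(n) for n in names),
                               max(len(r) for r in rels))
for name, rel in zip(names, rels):
    print((fmt % (name, rel, '')).rstrip())
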
@command('debugwireargs',
    [('', 'three', '', 'three'),
     ('', 'four', '', 'four'),
     ('', 'five', '', 'five'),
    ] + cmdutil.remoteopts,
    _('REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    for opt in cmdutil.remoteopts:
        del opts[opt[1]]
    args = {}
    for k, v in opts.iteritems():
        if v:
            args[k] = v
    # run twice to check that we don't mess up the stream for the next command
    res1 = repo.debugwireargs(*vals, **args)
    res2 = repo.debugwireargs(*vals, **args)
    ui.write("%s\n" % res1)
    if res1 != res2:
        ui.warn("%s\n" % res2)