debugfsinfo: use util.getfstype...
Jun Wu - r31595:e8bd005c default
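In essence, this changeset simplifies debugfsinfo's filesystem-type probe: the old code imported the optional osutil module and printed the fstype only when osutil.getfstype was available, while the new code calls util.getfstype unconditionally. A minimal before/after sketch paraphrasing the touched lines of the hunk below (ui and util are the surrounding Mercurial objects, so this excerpt illustrates the change rather than running standalone; util.getfstype is assumed to return the filesystem name or a false value when it cannot tell):

    # before (old lines 793-796): probe the optional C module directly
    from . import osutil
    if util.safehasattr(osutil, 'getfstype'):
        fstype = osutil.getfstype('.')
        ui.write(('fstype: %s\n') % (fstype or '(unknown)'))

    # after (new line 793): let the util-level wrapper handle availability
    ui.write(('fstype: %s\n') % (util.getfstype('.') or '(unknown)'))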
@@ -1,2115 +1,2112 b''
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import difflib
10 import difflib
11 import errno
11 import errno
12 import operator
12 import operator
13 import os
13 import os
14 import random
14 import random
15 import socket
15 import socket
16 import string
16 import string
17 import sys
17 import sys
18 import tempfile
18 import tempfile
19 import time
19 import time
20
20
21 from .i18n import _
21 from .i18n import _
22 from .node import (
22 from .node import (
23 bin,
23 bin,
24 hex,
24 hex,
25 nullhex,
25 nullhex,
26 nullid,
26 nullid,
27 nullrev,
27 nullrev,
28 short,
28 short,
29 )
29 )
30 from . import (
30 from . import (
31 bundle2,
31 bundle2,
32 changegroup,
32 changegroup,
33 cmdutil,
33 cmdutil,
34 color,
34 color,
35 commands,
35 commands,
36 context,
36 context,
37 dagparser,
37 dagparser,
38 dagutil,
38 dagutil,
39 encoding,
39 encoding,
40 error,
40 error,
41 exchange,
41 exchange,
42 extensions,
42 extensions,
43 fileset,
43 fileset,
44 formatter,
44 formatter,
45 hg,
45 hg,
46 localrepo,
46 localrepo,
47 lock as lockmod,
47 lock as lockmod,
48 merge as mergemod,
48 merge as mergemod,
49 obsolete,
49 obsolete,
50 policy,
50 policy,
51 pvec,
51 pvec,
52 pycompat,
52 pycompat,
53 repair,
53 repair,
54 revlog,
54 revlog,
55 revset,
55 revset,
56 revsetlang,
56 revsetlang,
57 scmutil,
57 scmutil,
58 setdiscovery,
58 setdiscovery,
59 simplemerge,
59 simplemerge,
60 smartset,
60 smartset,
61 sslutil,
61 sslutil,
62 streamclone,
62 streamclone,
63 templater,
63 templater,
64 treediscovery,
64 treediscovery,
65 util,
65 util,
66 vfs as vfsmod,
66 vfs as vfsmod,
67 )
67 )
68
68
69 release = lockmod.release
69 release = lockmod.release
70
70
71 # We reuse the command table from commands because it is easier than
71 # We reuse the command table from commands because it is easier than
72 # teaching dispatch about multiple tables.
72 # teaching dispatch about multiple tables.
73 command = cmdutil.command(commands.table)
73 command = cmdutil.command(commands.table)
74
74
75 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
75 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
76 def debugancestor(ui, repo, *args):
76 def debugancestor(ui, repo, *args):
77 """find the ancestor revision of two revisions in a given index"""
77 """find the ancestor revision of two revisions in a given index"""
78 if len(args) == 3:
78 if len(args) == 3:
79 index, rev1, rev2 = args
79 index, rev1, rev2 = args
80 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
80 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
81 lookup = r.lookup
81 lookup = r.lookup
82 elif len(args) == 2:
82 elif len(args) == 2:
83 if not repo:
83 if not repo:
84 raise error.Abort(_('there is no Mercurial repository here '
84 raise error.Abort(_('there is no Mercurial repository here '
85 '(.hg not found)'))
85 '(.hg not found)'))
86 rev1, rev2 = args
86 rev1, rev2 = args
87 r = repo.changelog
87 r = repo.changelog
88 lookup = repo.lookup
88 lookup = repo.lookup
89 else:
89 else:
90 raise error.Abort(_('either two or three arguments required'))
90 raise error.Abort(_('either two or three arguments required'))
91 a = r.ancestor(lookup(rev1), lookup(rev2))
91 a = r.ancestor(lookup(rev1), lookup(rev2))
92 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
92 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
93
93
94 @command('debugapplystreamclonebundle', [], 'FILE')
94 @command('debugapplystreamclonebundle', [], 'FILE')
95 def debugapplystreamclonebundle(ui, repo, fname):
95 def debugapplystreamclonebundle(ui, repo, fname):
96 """apply a stream clone bundle file"""
96 """apply a stream clone bundle file"""
97 f = hg.openpath(ui, fname)
97 f = hg.openpath(ui, fname)
98 gen = exchange.readbundle(ui, f, fname)
98 gen = exchange.readbundle(ui, f, fname)
99 gen.apply(repo)
99 gen.apply(repo)
100
100
101 @command('debugbuilddag',
101 @command('debugbuilddag',
102 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
102 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
103 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
103 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
104 ('n', 'new-file', None, _('add new file at each rev'))],
104 ('n', 'new-file', None, _('add new file at each rev'))],
105 _('[OPTION]... [TEXT]'))
105 _('[OPTION]... [TEXT]'))
106 def debugbuilddag(ui, repo, text=None,
106 def debugbuilddag(ui, repo, text=None,
107 mergeable_file=False,
107 mergeable_file=False,
108 overwritten_file=False,
108 overwritten_file=False,
109 new_file=False):
109 new_file=False):
110 """builds a repo with a given DAG from scratch in the current empty repo
110 """builds a repo with a given DAG from scratch in the current empty repo
111
111
112 The description of the DAG is read from stdin if not given on the
112 The description of the DAG is read from stdin if not given on the
113 command line.
113 command line.
114
114
115 Elements:
115 Elements:
116
116
117 - "+n" is a linear run of n nodes based on the current default parent
117 - "+n" is a linear run of n nodes based on the current default parent
118 - "." is a single node based on the current default parent
118 - "." is a single node based on the current default parent
119 - "$" resets the default parent to null (implied at the start);
119 - "$" resets the default parent to null (implied at the start);
120 otherwise the default parent is always the last node created
120 otherwise the default parent is always the last node created
121 - "<p" sets the default parent to the backref p
121 - "<p" sets the default parent to the backref p
122 - "*p" is a fork at parent p, which is a backref
122 - "*p" is a fork at parent p, which is a backref
123 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
123 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
124 - "/p2" is a merge of the preceding node and p2
124 - "/p2" is a merge of the preceding node and p2
125 - ":tag" defines a local tag for the preceding node
125 - ":tag" defines a local tag for the preceding node
126 - "@branch" sets the named branch for subsequent nodes
126 - "@branch" sets the named branch for subsequent nodes
127 - "#...\\n" is a comment up to the end of the line
127 - "#...\\n" is a comment up to the end of the line
128
128
129 Whitespace between the above elements is ignored.
129 Whitespace between the above elements is ignored.
130
130
131 A backref is either
131 A backref is either
132
132
133 - a number n, which references the node curr-n, where curr is the current
133 - a number n, which references the node curr-n, where curr is the current
134 node, or
134 node, or
135 - the name of a local tag you placed earlier using ":tag", or
135 - the name of a local tag you placed earlier using ":tag", or
136 - empty to denote the default parent.
136 - empty to denote the default parent.
137
137
138 All string-valued elements are either strictly alphanumeric, or must
138 All string-valued elements are either strictly alphanumeric, or must
139 be enclosed in double quotes ("..."), with "\\" as escape character.
139 be enclosed in double quotes ("..."), with "\\" as escape character.
140 """
140 """
141
141
142 if text is None:
142 if text is None:
143 ui.status(_("reading DAG from stdin\n"))
143 ui.status(_("reading DAG from stdin\n"))
144 text = ui.fin.read()
144 text = ui.fin.read()
145
145
146 cl = repo.changelog
146 cl = repo.changelog
147 if len(cl) > 0:
147 if len(cl) > 0:
148 raise error.Abort(_('repository is not empty'))
148 raise error.Abort(_('repository is not empty'))
149
149
150 # determine number of revs in DAG
150 # determine number of revs in DAG
151 total = 0
151 total = 0
152 for type, data in dagparser.parsedag(text):
152 for type, data in dagparser.parsedag(text):
153 if type == 'n':
153 if type == 'n':
154 total += 1
154 total += 1
155
155
156 if mergeable_file:
156 if mergeable_file:
157 linesperrev = 2
157 linesperrev = 2
158 # make a file with k lines per rev
158 # make a file with k lines per rev
159 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
159 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
160 initialmergedlines.append("")
160 initialmergedlines.append("")
161
161
162 tags = []
162 tags = []
163
163
164 wlock = lock = tr = None
164 wlock = lock = tr = None
165 try:
165 try:
166 wlock = repo.wlock()
166 wlock = repo.wlock()
167 lock = repo.lock()
167 lock = repo.lock()
168 tr = repo.transaction("builddag")
168 tr = repo.transaction("builddag")
169
169
170 at = -1
170 at = -1
171 atbranch = 'default'
171 atbranch = 'default'
172 nodeids = []
172 nodeids = []
173 id = 0
173 id = 0
174 ui.progress(_('building'), id, unit=_('revisions'), total=total)
174 ui.progress(_('building'), id, unit=_('revisions'), total=total)
175 for type, data in dagparser.parsedag(text):
175 for type, data in dagparser.parsedag(text):
176 if type == 'n':
176 if type == 'n':
177 ui.note(('node %s\n' % str(data)))
177 ui.note(('node %s\n' % str(data)))
178 id, ps = data
178 id, ps = data
179
179
180 files = []
180 files = []
181 fctxs = {}
181 fctxs = {}
182
182
183 p2 = None
183 p2 = None
184 if mergeable_file:
184 if mergeable_file:
185 fn = "mf"
185 fn = "mf"
186 p1 = repo[ps[0]]
186 p1 = repo[ps[0]]
187 if len(ps) > 1:
187 if len(ps) > 1:
188 p2 = repo[ps[1]]
188 p2 = repo[ps[1]]
189 pa = p1.ancestor(p2)
189 pa = p1.ancestor(p2)
190 base, local, other = [x[fn].data() for x in (pa, p1,
190 base, local, other = [x[fn].data() for x in (pa, p1,
191 p2)]
191 p2)]
192 m3 = simplemerge.Merge3Text(base, local, other)
192 m3 = simplemerge.Merge3Text(base, local, other)
193 ml = [l.strip() for l in m3.merge_lines()]
193 ml = [l.strip() for l in m3.merge_lines()]
194 ml.append("")
194 ml.append("")
195 elif at > 0:
195 elif at > 0:
196 ml = p1[fn].data().split("\n")
196 ml = p1[fn].data().split("\n")
197 else:
197 else:
198 ml = initialmergedlines
198 ml = initialmergedlines
199 ml[id * linesperrev] += " r%i" % id
199 ml[id * linesperrev] += " r%i" % id
200 mergedtext = "\n".join(ml)
200 mergedtext = "\n".join(ml)
201 files.append(fn)
201 files.append(fn)
202 fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
202 fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
203
203
204 if overwritten_file:
204 if overwritten_file:
205 fn = "of"
205 fn = "of"
206 files.append(fn)
206 files.append(fn)
207 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
207 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
208
208
209 if new_file:
209 if new_file:
210 fn = "nf%i" % id
210 fn = "nf%i" % id
211 files.append(fn)
211 files.append(fn)
212 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
212 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
213 if len(ps) > 1:
213 if len(ps) > 1:
214 if not p2:
214 if not p2:
215 p2 = repo[ps[1]]
215 p2 = repo[ps[1]]
216 for fn in p2:
216 for fn in p2:
217 if fn.startswith("nf"):
217 if fn.startswith("nf"):
218 files.append(fn)
218 files.append(fn)
219 fctxs[fn] = p2[fn]
219 fctxs[fn] = p2[fn]
220
220
221 def fctxfn(repo, cx, path):
221 def fctxfn(repo, cx, path):
222 return fctxs.get(path)
222 return fctxs.get(path)
223
223
224 if len(ps) == 0 or ps[0] < 0:
224 if len(ps) == 0 or ps[0] < 0:
225 pars = [None, None]
225 pars = [None, None]
226 elif len(ps) == 1:
226 elif len(ps) == 1:
227 pars = [nodeids[ps[0]], None]
227 pars = [nodeids[ps[0]], None]
228 else:
228 else:
229 pars = [nodeids[p] for p in ps]
229 pars = [nodeids[p] for p in ps]
230 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
230 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
231 date=(id, 0),
231 date=(id, 0),
232 user="debugbuilddag",
232 user="debugbuilddag",
233 extra={'branch': atbranch})
233 extra={'branch': atbranch})
234 nodeid = repo.commitctx(cx)
234 nodeid = repo.commitctx(cx)
235 nodeids.append(nodeid)
235 nodeids.append(nodeid)
236 at = id
236 at = id
237 elif type == 'l':
237 elif type == 'l':
238 id, name = data
238 id, name = data
239 ui.note(('tag %s\n' % name))
239 ui.note(('tag %s\n' % name))
240 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
240 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
241 elif type == 'a':
241 elif type == 'a':
242 ui.note(('branch %s\n' % data))
242 ui.note(('branch %s\n' % data))
243 atbranch = data
243 atbranch = data
244 ui.progress(_('building'), id, unit=_('revisions'), total=total)
244 ui.progress(_('building'), id, unit=_('revisions'), total=total)
245 tr.close()
245 tr.close()
246
246
247 if tags:
247 if tags:
248 repo.vfs.write("localtags", "".join(tags))
248 repo.vfs.write("localtags", "".join(tags))
249 finally:
249 finally:
250 ui.progress(_('building'), None)
250 ui.progress(_('building'), None)
251 release(tr, lock, wlock)
251 release(tr, lock, wlock)
252
252
253 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
253 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
254 indent_string = ' ' * indent
254 indent_string = ' ' * indent
255 if all:
255 if all:
256 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
256 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
257 % indent_string)
257 % indent_string)
258
258
259 def showchunks(named):
259 def showchunks(named):
260 ui.write("\n%s%s\n" % (indent_string, named))
260 ui.write("\n%s%s\n" % (indent_string, named))
261 chain = None
261 chain = None
262 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
262 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
263 node = chunkdata['node']
263 node = chunkdata['node']
264 p1 = chunkdata['p1']
264 p1 = chunkdata['p1']
265 p2 = chunkdata['p2']
265 p2 = chunkdata['p2']
266 cs = chunkdata['cs']
266 cs = chunkdata['cs']
267 deltabase = chunkdata['deltabase']
267 deltabase = chunkdata['deltabase']
268 delta = chunkdata['delta']
268 delta = chunkdata['delta']
269 ui.write("%s%s %s %s %s %s %s\n" %
269 ui.write("%s%s %s %s %s %s %s\n" %
270 (indent_string, hex(node), hex(p1), hex(p2),
270 (indent_string, hex(node), hex(p1), hex(p2),
271 hex(cs), hex(deltabase), len(delta)))
271 hex(cs), hex(deltabase), len(delta)))
272 chain = node
272 chain = node
273
273
274 chunkdata = gen.changelogheader()
274 chunkdata = gen.changelogheader()
275 showchunks("changelog")
275 showchunks("changelog")
276 chunkdata = gen.manifestheader()
276 chunkdata = gen.manifestheader()
277 showchunks("manifest")
277 showchunks("manifest")
278 for chunkdata in iter(gen.filelogheader, {}):
278 for chunkdata in iter(gen.filelogheader, {}):
279 fname = chunkdata['filename']
279 fname = chunkdata['filename']
280 showchunks(fname)
280 showchunks(fname)
281 else:
281 else:
282 if isinstance(gen, bundle2.unbundle20):
282 if isinstance(gen, bundle2.unbundle20):
283 raise error.Abort(_('use debugbundle2 for this file'))
283 raise error.Abort(_('use debugbundle2 for this file'))
284 chunkdata = gen.changelogheader()
284 chunkdata = gen.changelogheader()
285 chain = None
285 chain = None
286 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
286 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
287 node = chunkdata['node']
287 node = chunkdata['node']
288 ui.write("%s%s\n" % (indent_string, hex(node)))
288 ui.write("%s%s\n" % (indent_string, hex(node)))
289 chain = node
289 chain = node
290
290
291 def _debugbundle2(ui, gen, all=None, **opts):
291 def _debugbundle2(ui, gen, all=None, **opts):
292 """lists the contents of a bundle2"""
292 """lists the contents of a bundle2"""
293 if not isinstance(gen, bundle2.unbundle20):
293 if not isinstance(gen, bundle2.unbundle20):
294 raise error.Abort(_('not a bundle2 file'))
294 raise error.Abort(_('not a bundle2 file'))
295 ui.write(('Stream params: %s\n' % repr(gen.params)))
295 ui.write(('Stream params: %s\n' % repr(gen.params)))
296 for part in gen.iterparts():
296 for part in gen.iterparts():
297 ui.write('%s -- %r\n' % (part.type, repr(part.params)))
297 ui.write('%s -- %r\n' % (part.type, repr(part.params)))
298 if part.type == 'changegroup':
298 if part.type == 'changegroup':
299 version = part.params.get('version', '01')
299 version = part.params.get('version', '01')
300 cg = changegroup.getunbundler(version, part, 'UN')
300 cg = changegroup.getunbundler(version, part, 'UN')
301 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
301 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
302
302
303 @command('debugbundle',
303 @command('debugbundle',
304 [('a', 'all', None, _('show all details')),
304 [('a', 'all', None, _('show all details')),
305 ('', 'spec', None, _('print the bundlespec of the bundle'))],
305 ('', 'spec', None, _('print the bundlespec of the bundle'))],
306 _('FILE'),
306 _('FILE'),
307 norepo=True)
307 norepo=True)
308 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
308 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
309 """lists the contents of a bundle"""
309 """lists the contents of a bundle"""
310 with hg.openpath(ui, bundlepath) as f:
310 with hg.openpath(ui, bundlepath) as f:
311 if spec:
311 if spec:
312 spec = exchange.getbundlespec(ui, f)
312 spec = exchange.getbundlespec(ui, f)
313 ui.write('%s\n' % spec)
313 ui.write('%s\n' % spec)
314 return
314 return
315
315
316 gen = exchange.readbundle(ui, f, bundlepath)
316 gen = exchange.readbundle(ui, f, bundlepath)
317 if isinstance(gen, bundle2.unbundle20):
317 if isinstance(gen, bundle2.unbundle20):
318 return _debugbundle2(ui, gen, all=all, **opts)
318 return _debugbundle2(ui, gen, all=all, **opts)
319 _debugchangegroup(ui, gen, all=all, **opts)
319 _debugchangegroup(ui, gen, all=all, **opts)
320
320
321 @command('debugcheckstate', [], '')
321 @command('debugcheckstate', [], '')
322 def debugcheckstate(ui, repo):
322 def debugcheckstate(ui, repo):
323 """validate the correctness of the current dirstate"""
323 """validate the correctness of the current dirstate"""
324 parent1, parent2 = repo.dirstate.parents()
324 parent1, parent2 = repo.dirstate.parents()
325 m1 = repo[parent1].manifest()
325 m1 = repo[parent1].manifest()
326 m2 = repo[parent2].manifest()
326 m2 = repo[parent2].manifest()
327 errors = 0
327 errors = 0
328 for f in repo.dirstate:
328 for f in repo.dirstate:
329 state = repo.dirstate[f]
329 state = repo.dirstate[f]
330 if state in "nr" and f not in m1:
330 if state in "nr" and f not in m1:
331 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
331 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
332 errors += 1
332 errors += 1
333 if state in "a" and f in m1:
333 if state in "a" and f in m1:
334 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
334 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
335 errors += 1
335 errors += 1
336 if state in "m" and f not in m1 and f not in m2:
336 if state in "m" and f not in m1 and f not in m2:
337 ui.warn(_("%s in state %s, but not in either manifest\n") %
337 ui.warn(_("%s in state %s, but not in either manifest\n") %
338 (f, state))
338 (f, state))
339 errors += 1
339 errors += 1
340 for f in m1:
340 for f in m1:
341 state = repo.dirstate[f]
341 state = repo.dirstate[f]
342 if state not in "nrm":
342 if state not in "nrm":
343 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
343 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
344 errors += 1
344 errors += 1
345 if errors:
345 if errors:
346 error = _(".hg/dirstate inconsistent with current parent's manifest")
346 errstr = _(".hg/dirstate inconsistent with current parent's manifest")
346 errstr = _(".hg/dirstate inconsistent with current parent's manifest")
347 raise error.Abort(errstr)
347 raise error.Abort(errstr)
348
349 @command('debugcolor',
349 @command('debugcolor',
350 [('', 'style', None, _('show all configured styles'))],
350 [('', 'style', None, _('show all configured styles'))],
351 'hg debugcolor')
351 'hg debugcolor')
352 def debugcolor(ui, repo, **opts):
352 def debugcolor(ui, repo, **opts):
353 """show available color, effects or style"""
353 """show available color, effects or style"""
354 ui.write(('color mode: %s\n') % ui._colormode)
354 ui.write(('color mode: %s\n') % ui._colormode)
355 if opts.get('style'):
355 if opts.get('style'):
356 return _debugdisplaystyle(ui)
356 return _debugdisplaystyle(ui)
357 else:
357 else:
358 return _debugdisplaycolor(ui)
358 return _debugdisplaycolor(ui)
359
359
360 def _debugdisplaycolor(ui):
360 def _debugdisplaycolor(ui):
361 ui = ui.copy()
361 ui = ui.copy()
362 ui._styles.clear()
362 ui._styles.clear()
363 for effect in color._effects.keys():
363 for effect in color._effects.keys():
364 ui._styles[effect] = effect
364 ui._styles[effect] = effect
365 if ui._terminfoparams:
365 if ui._terminfoparams:
366 for k, v in ui.configitems('color'):
366 for k, v in ui.configitems('color'):
367 if k.startswith('color.'):
367 if k.startswith('color.'):
368 ui._styles[k] = k[6:]
368 ui._styles[k] = k[6:]
369 elif k.startswith('terminfo.'):
369 elif k.startswith('terminfo.'):
370 ui._styles[k] = k[9:]
370 ui._styles[k] = k[9:]
371 ui.write(_('available colors:\n'))
371 ui.write(_('available colors:\n'))
372 # sort labels so those containing '_' come last, grouping the '_background' entries.
372 # sort labels so those containing '_' come last, grouping the '_background' entries.
373 items = sorted(ui._styles.items(),
373 items = sorted(ui._styles.items(),
374 key=lambda i: ('_' in i[0], i[0], i[1]))
374 key=lambda i: ('_' in i[0], i[0], i[1]))
375 for colorname, label in items:
375 for colorname, label in items:
376 ui.write(('%s\n') % colorname, label=label)
376 ui.write(('%s\n') % colorname, label=label)
377
377
378 def _debugdisplaystyle(ui):
378 def _debugdisplaystyle(ui):
379 ui.write(_('available style:\n'))
379 ui.write(_('available style:\n'))
380 width = max(len(s) for s in ui._styles)
380 width = max(len(s) for s in ui._styles)
381 for label, effects in sorted(ui._styles.items()):
381 for label, effects in sorted(ui._styles.items()):
382 ui.write('%s' % label, label=label)
382 ui.write('%s' % label, label=label)
383 if effects:
383 if effects:
384 # 50
384 # 50
385 ui.write(': ')
385 ui.write(': ')
386 ui.write(' ' * (max(0, width - len(label))))
386 ui.write(' ' * (max(0, width - len(label))))
387 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
387 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
388 ui.write('\n')
388 ui.write('\n')
389
389
390 @command('debugcommands', [], _('[COMMAND]'), norepo=True)
390 @command('debugcommands', [], _('[COMMAND]'), norepo=True)
391 def debugcommands(ui, cmd='', *args):
391 def debugcommands(ui, cmd='', *args):
392 """list all available commands and options"""
392 """list all available commands and options"""
393 for cmd, vals in sorted(commands.table.iteritems()):
393 for cmd, vals in sorted(commands.table.iteritems()):
394 cmd = cmd.split('|')[0].strip('^')
394 cmd = cmd.split('|')[0].strip('^')
395 opts = ', '.join([i[1] for i in vals[1]])
395 opts = ', '.join([i[1] for i in vals[1]])
396 ui.write('%s: %s\n' % (cmd, opts))
396 ui.write('%s: %s\n' % (cmd, opts))
397
397
398 @command('debugcomplete',
398 @command('debugcomplete',
399 [('o', 'options', None, _('show the command options'))],
399 [('o', 'options', None, _('show the command options'))],
400 _('[-o] CMD'),
400 _('[-o] CMD'),
401 norepo=True)
401 norepo=True)
402 def debugcomplete(ui, cmd='', **opts):
402 def debugcomplete(ui, cmd='', **opts):
403 """returns the completion list associated with the given command"""
403 """returns the completion list associated with the given command"""
404
404
405 if opts.get('options'):
405 if opts.get('options'):
406 options = []
406 options = []
407 otables = [commands.globalopts]
407 otables = [commands.globalopts]
408 if cmd:
408 if cmd:
409 aliases, entry = cmdutil.findcmd(cmd, commands.table, False)
409 aliases, entry = cmdutil.findcmd(cmd, commands.table, False)
410 otables.append(entry[1])
410 otables.append(entry[1])
411 for t in otables:
411 for t in otables:
412 for o in t:
412 for o in t:
413 if "(DEPRECATED)" in o[3]:
413 if "(DEPRECATED)" in o[3]:
414 continue
414 continue
415 if o[0]:
415 if o[0]:
416 options.append('-%s' % o[0])
416 options.append('-%s' % o[0])
417 options.append('--%s' % o[1])
417 options.append('--%s' % o[1])
418 ui.write("%s\n" % "\n".join(options))
418 ui.write("%s\n" % "\n".join(options))
419 return
419 return
420
420
421 cmdlist, unused_allcmds = cmdutil.findpossible(cmd, commands.table)
421 cmdlist, unused_allcmds = cmdutil.findpossible(cmd, commands.table)
422 if ui.verbose:
422 if ui.verbose:
423 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
423 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
424 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
424 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
425
425
426 @command('debugcreatestreamclonebundle', [], 'FILE')
426 @command('debugcreatestreamclonebundle', [], 'FILE')
427 def debugcreatestreamclonebundle(ui, repo, fname):
427 def debugcreatestreamclonebundle(ui, repo, fname):
428 """create a stream clone bundle file
428 """create a stream clone bundle file
429
429
430 Stream bundles are special bundles that are essentially archives of
430 Stream bundles are special bundles that are essentially archives of
431 revlog files. They are commonly used for cloning very quickly.
431 revlog files. They are commonly used for cloning very quickly.
432 """
432 """
433 requirements, gen = streamclone.generatebundlev1(repo)
433 requirements, gen = streamclone.generatebundlev1(repo)
434 changegroup.writechunks(ui, gen, fname)
434 changegroup.writechunks(ui, gen, fname)
435
435
436 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
436 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
437
437
438 @command('debugdag',
438 @command('debugdag',
439 [('t', 'tags', None, _('use tags as labels')),
439 [('t', 'tags', None, _('use tags as labels')),
440 ('b', 'branches', None, _('annotate with branch names')),
440 ('b', 'branches', None, _('annotate with branch names')),
441 ('', 'dots', None, _('use dots for runs')),
441 ('', 'dots', None, _('use dots for runs')),
442 ('s', 'spaces', None, _('separate elements by spaces'))],
442 ('s', 'spaces', None, _('separate elements by spaces'))],
443 _('[OPTION]... [FILE [REV]...]'),
443 _('[OPTION]... [FILE [REV]...]'),
444 optionalrepo=True)
444 optionalrepo=True)
445 def debugdag(ui, repo, file_=None, *revs, **opts):
445 def debugdag(ui, repo, file_=None, *revs, **opts):
446 """format the changelog or an index DAG as a concise textual description
446 """format the changelog or an index DAG as a concise textual description
447
447
448 If you pass a revlog index, the revlog's DAG is emitted. If you list
448 If you pass a revlog index, the revlog's DAG is emitted. If you list
449 revision numbers, they get labeled in the output as rN.
449 revision numbers, they get labeled in the output as rN.
450
450
451 Otherwise, the changelog DAG of the current repo is emitted.
451 Otherwise, the changelog DAG of the current repo is emitted.
452 """
452 """
453 spaces = opts.get('spaces')
453 spaces = opts.get('spaces')
454 dots = opts.get('dots')
454 dots = opts.get('dots')
455 if file_:
455 if file_:
456 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
456 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
457 file_)
457 file_)
458 revs = set((int(r) for r in revs))
458 revs = set((int(r) for r in revs))
459 def events():
459 def events():
460 for r in rlog:
460 for r in rlog:
461 yield 'n', (r, list(p for p in rlog.parentrevs(r)
461 yield 'n', (r, list(p for p in rlog.parentrevs(r)
462 if p != -1))
462 if p != -1))
463 if r in revs:
463 if r in revs:
464 yield 'l', (r, "r%i" % r)
464 yield 'l', (r, "r%i" % r)
465 elif repo:
465 elif repo:
466 cl = repo.changelog
466 cl = repo.changelog
467 tags = opts.get('tags')
467 tags = opts.get('tags')
468 branches = opts.get('branches')
468 branches = opts.get('branches')
469 if tags:
469 if tags:
470 labels = {}
470 labels = {}
471 for l, n in repo.tags().items():
471 for l, n in repo.tags().items():
472 labels.setdefault(cl.rev(n), []).append(l)
472 labels.setdefault(cl.rev(n), []).append(l)
473 def events():
473 def events():
474 b = "default"
474 b = "default"
475 for r in cl:
475 for r in cl:
476 if branches:
476 if branches:
477 newb = cl.read(cl.node(r))[5]['branch']
477 newb = cl.read(cl.node(r))[5]['branch']
478 if newb != b:
478 if newb != b:
479 yield 'a', newb
479 yield 'a', newb
480 b = newb
480 b = newb
481 yield 'n', (r, list(p for p in cl.parentrevs(r)
481 yield 'n', (r, list(p for p in cl.parentrevs(r)
482 if p != -1))
482 if p != -1))
483 if tags:
483 if tags:
484 ls = labels.get(r)
484 ls = labels.get(r)
485 if ls:
485 if ls:
486 for l in ls:
486 for l in ls:
487 yield 'l', (r, l)
487 yield 'l', (r, l)
488 else:
488 else:
489 raise error.Abort(_('need repo for changelog dag'))
489 raise error.Abort(_('need repo for changelog dag'))
490
490
491 for line in dagparser.dagtextlines(events(),
491 for line in dagparser.dagtextlines(events(),
492 addspaces=spaces,
492 addspaces=spaces,
493 wraplabels=True,
493 wraplabels=True,
494 wrapannotations=True,
494 wrapannotations=True,
495 wrapnonlinear=dots,
495 wrapnonlinear=dots,
496 usedots=dots,
496 usedots=dots,
497 maxlinewidth=70):
497 maxlinewidth=70):
498 ui.write(line)
498 ui.write(line)
499 ui.write("\n")
499 ui.write("\n")
500
500
501 @command('debugdata', commands.debugrevlogopts, _('-c|-m|FILE REV'))
501 @command('debugdata', commands.debugrevlogopts, _('-c|-m|FILE REV'))
502 def debugdata(ui, repo, file_, rev=None, **opts):
502 def debugdata(ui, repo, file_, rev=None, **opts):
503 """dump the contents of a data file revision"""
503 """dump the contents of a data file revision"""
504 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
504 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
505 if rev is not None:
505 if rev is not None:
506 raise error.CommandError('debugdata', _('invalid arguments'))
506 raise error.CommandError('debugdata', _('invalid arguments'))
507 file_, rev = None, file_
507 file_, rev = None, file_
508 elif rev is None:
508 elif rev is None:
509 raise error.CommandError('debugdata', _('invalid arguments'))
509 raise error.CommandError('debugdata', _('invalid arguments'))
510 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
510 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
511 try:
511 try:
512 ui.write(r.revision(r.lookup(rev), raw=True))
512 ui.write(r.revision(r.lookup(rev), raw=True))
513 except KeyError:
513 except KeyError:
514 raise error.Abort(_('invalid revision identifier %s') % rev)
514 raise error.Abort(_('invalid revision identifier %s') % rev)
515
515
516 @command('debugdate',
516 @command('debugdate',
517 [('e', 'extended', None, _('try extended date formats'))],
517 [('e', 'extended', None, _('try extended date formats'))],
518 _('[-e] DATE [RANGE]'),
518 _('[-e] DATE [RANGE]'),
519 norepo=True, optionalrepo=True)
519 norepo=True, optionalrepo=True)
520 def debugdate(ui, date, range=None, **opts):
520 def debugdate(ui, date, range=None, **opts):
521 """parse and display a date"""
521 """parse and display a date"""
522 if opts["extended"]:
522 if opts["extended"]:
523 d = util.parsedate(date, util.extendeddateformats)
523 d = util.parsedate(date, util.extendeddateformats)
524 else:
524 else:
525 d = util.parsedate(date)
525 d = util.parsedate(date)
526 ui.write(("internal: %s %s\n") % d)
526 ui.write(("internal: %s %s\n") % d)
527 ui.write(("standard: %s\n") % util.datestr(d))
527 ui.write(("standard: %s\n") % util.datestr(d))
528 if range:
528 if range:
529 m = util.matchdate(range)
529 m = util.matchdate(range)
530 ui.write(("match: %s\n") % m(d[0]))
530 ui.write(("match: %s\n") % m(d[0]))
531
531
532 @command('debugdeltachain',
532 @command('debugdeltachain',
533 commands.debugrevlogopts + commands.formatteropts,
533 commands.debugrevlogopts + commands.formatteropts,
534 _('-c|-m|FILE'),
534 _('-c|-m|FILE'),
535 optionalrepo=True)
535 optionalrepo=True)
536 def debugdeltachain(ui, repo, file_=None, **opts):
536 def debugdeltachain(ui, repo, file_=None, **opts):
537 """dump information about delta chains in a revlog
537 """dump information about delta chains in a revlog
538
538
539 Output can be templatized. Available template keywords are:
539 Output can be templatized. Available template keywords are:
540
540
541 :``rev``: revision number
541 :``rev``: revision number
542 :``chainid``: delta chain identifier (numbered by unique base)
542 :``chainid``: delta chain identifier (numbered by unique base)
543 :``chainlen``: delta chain length to this revision
543 :``chainlen``: delta chain length to this revision
544 :``prevrev``: previous revision in delta chain
544 :``prevrev``: previous revision in delta chain
545 :``deltatype``: role of delta / how it was computed
545 :``deltatype``: role of delta / how it was computed
546 :``compsize``: compressed size of revision
546 :``compsize``: compressed size of revision
547 :``uncompsize``: uncompressed size of revision
547 :``uncompsize``: uncompressed size of revision
548 :``chainsize``: total size of compressed revisions in chain
548 :``chainsize``: total size of compressed revisions in chain
549 :``chainratio``: total chain size divided by uncompressed revision size
549 :``chainratio``: total chain size divided by uncompressed revision size
550 (new delta chains typically start at ratio 2.00)
550 (new delta chains typically start at ratio 2.00)
551 :``lindist``: linear distance from base revision in delta chain to end
551 :``lindist``: linear distance from base revision in delta chain to end
552 of this revision
552 of this revision
553 :``extradist``: total size of revisions not part of this delta chain from
553 :``extradist``: total size of revisions not part of this delta chain from
554 base of delta chain to end of this revision; a measurement
554 base of delta chain to end of this revision; a measurement
555 of how much extra data we need to read/seek across to read
555 of how much extra data we need to read/seek across to read
556 the delta chain for this revision
556 the delta chain for this revision
557 :``extraratio``: extradist divided by chainsize; another representation of
557 :``extraratio``: extradist divided by chainsize; another representation of
558 how much unrelated data is needed to load this delta chain
558 how much unrelated data is needed to load this delta chain
559 """
559 """
560 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
560 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
561 index = r.index
561 index = r.index
562 generaldelta = r.version & revlog.REVLOGGENERALDELTA
562 generaldelta = r.version & revlog.REVLOGGENERALDELTA
563
563
564 def revinfo(rev):
564 def revinfo(rev):
565 e = index[rev]
565 e = index[rev]
566 compsize = e[1]
566 compsize = e[1]
567 uncompsize = e[2]
567 uncompsize = e[2]
568 chainsize = 0
568 chainsize = 0
569
569
570 if generaldelta:
570 if generaldelta:
571 if e[3] == e[5]:
571 if e[3] == e[5]:
572 deltatype = 'p1'
572 deltatype = 'p1'
573 elif e[3] == e[6]:
573 elif e[3] == e[6]:
574 deltatype = 'p2'
574 deltatype = 'p2'
575 elif e[3] == rev - 1:
575 elif e[3] == rev - 1:
576 deltatype = 'prev'
576 deltatype = 'prev'
577 elif e[3] == rev:
577 elif e[3] == rev:
578 deltatype = 'base'
578 deltatype = 'base'
579 else:
579 else:
580 deltatype = 'other'
580 deltatype = 'other'
581 else:
581 else:
582 if e[3] == rev:
582 if e[3] == rev:
583 deltatype = 'base'
583 deltatype = 'base'
584 else:
584 else:
585 deltatype = 'prev'
585 deltatype = 'prev'
586
586
587 chain = r._deltachain(rev)[0]
587 chain = r._deltachain(rev)[0]
588 for iterrev in chain:
588 for iterrev in chain:
589 e = index[iterrev]
589 e = index[iterrev]
590 chainsize += e[1]
590 chainsize += e[1]
591
591
592 return compsize, uncompsize, deltatype, chain, chainsize
592 return compsize, uncompsize, deltatype, chain, chainsize
593
593
594 fm = ui.formatter('debugdeltachain', opts)
594 fm = ui.formatter('debugdeltachain', opts)
595
595
596 fm.plain(' rev chain# chainlen prev delta '
596 fm.plain(' rev chain# chainlen prev delta '
597 'size rawsize chainsize ratio lindist extradist '
597 'size rawsize chainsize ratio lindist extradist '
598 'extraratio\n')
598 'extraratio\n')
599
599
600 chainbases = {}
600 chainbases = {}
601 for rev in r:
601 for rev in r:
602 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
602 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
603 chainbase = chain[0]
603 chainbase = chain[0]
604 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
604 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
605 basestart = r.start(chainbase)
605 basestart = r.start(chainbase)
606 revstart = r.start(rev)
606 revstart = r.start(rev)
607 lineardist = revstart + comp - basestart
607 lineardist = revstart + comp - basestart
608 extradist = lineardist - chainsize
608 extradist = lineardist - chainsize
609 try:
609 try:
610 prevrev = chain[-2]
610 prevrev = chain[-2]
611 except IndexError:
611 except IndexError:
612 prevrev = -1
612 prevrev = -1
613
613
614 chainratio = float(chainsize) / float(uncomp)
614 chainratio = float(chainsize) / float(uncomp)
615 extraratio = float(extradist) / float(chainsize)
615 extraratio = float(extradist) / float(chainsize)
616
616
617 fm.startitem()
617 fm.startitem()
618 fm.write('rev chainid chainlen prevrev deltatype compsize '
618 fm.write('rev chainid chainlen prevrev deltatype compsize '
619 'uncompsize chainsize chainratio lindist extradist '
619 'uncompsize chainsize chainratio lindist extradist '
620 'extraratio',
620 'extraratio',
621 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f\n',
621 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f\n',
622 rev, chainid, len(chain), prevrev, deltatype, comp,
622 rev, chainid, len(chain), prevrev, deltatype, comp,
623 uncomp, chainsize, chainratio, lineardist, extradist,
623 uncomp, chainsize, chainratio, lineardist, extradist,
624 extraratio,
624 extraratio,
625 rev=rev, chainid=chainid, chainlen=len(chain),
625 rev=rev, chainid=chainid, chainlen=len(chain),
626 prevrev=prevrev, deltatype=deltatype, compsize=comp,
626 prevrev=prevrev, deltatype=deltatype, compsize=comp,
627 uncompsize=uncomp, chainsize=chainsize,
627 uncompsize=uncomp, chainsize=chainsize,
628 chainratio=chainratio, lindist=lineardist,
628 chainratio=chainratio, lindist=lineardist,
629 extradist=extradist, extraratio=extraratio)
629 extradist=extradist, extraratio=extraratio)
630
630
631 fm.end()
631 fm.end()
632
632
633 @command('debugdirstate|debugstate',
633 @command('debugdirstate|debugstate',
634 [('', 'nodates', None, _('do not display the saved mtime')),
634 [('', 'nodates', None, _('do not display the saved mtime')),
635 ('', 'datesort', None, _('sort by saved mtime'))],
635 ('', 'datesort', None, _('sort by saved mtime'))],
636 _('[OPTION]...'))
636 _('[OPTION]...'))
637 def debugstate(ui, repo, **opts):
637 def debugstate(ui, repo, **opts):
638 """show the contents of the current dirstate"""
638 """show the contents of the current dirstate"""
639
639
640 nodates = opts.get('nodates')
640 nodates = opts.get('nodates')
641 datesort = opts.get('datesort')
641 datesort = opts.get('datesort')
642
642
643 timestr = ""
643 timestr = ""
644 if datesort:
644 if datesort:
645 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
645 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
646 else:
646 else:
647 keyfunc = None # sort by filename
647 keyfunc = None # sort by filename
648 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
648 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
649 if ent[3] == -1:
649 if ent[3] == -1:
650 timestr = 'unset '
650 timestr = 'unset '
651 elif nodates:
651 elif nodates:
652 timestr = 'set '
652 timestr = 'set '
653 else:
653 else:
654 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
654 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
655 time.localtime(ent[3]))
655 time.localtime(ent[3]))
656 if ent[1] & 0o20000:
656 if ent[1] & 0o20000:
657 mode = 'lnk'
657 mode = 'lnk'
658 else:
658 else:
659 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
659 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
660 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
660 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
661 for f in repo.dirstate.copies():
661 for f in repo.dirstate.copies():
662 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
662 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
663
663
664 @command('debugdiscovery',
664 @command('debugdiscovery',
665 [('', 'old', None, _('use old-style discovery')),
665 [('', 'old', None, _('use old-style discovery')),
666 ('', 'nonheads', None,
666 ('', 'nonheads', None,
667 _('use old-style discovery with non-heads included')),
667 _('use old-style discovery with non-heads included')),
668 ] + commands.remoteopts,
668 ] + commands.remoteopts,
669 _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
669 _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
670 def debugdiscovery(ui, repo, remoteurl="default", **opts):
670 def debugdiscovery(ui, repo, remoteurl="default", **opts):
671 """runs the changeset discovery protocol in isolation"""
671 """runs the changeset discovery protocol in isolation"""
672 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
672 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
673 opts.get('branch'))
673 opts.get('branch'))
674 remote = hg.peer(repo, opts, remoteurl)
674 remote = hg.peer(repo, opts, remoteurl)
675 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
675 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
676
676
677 # make sure tests are repeatable
677 # make sure tests are repeatable
678 random.seed(12323)
678 random.seed(12323)
679
679
680 def doit(localheads, remoteheads, remote=remote):
680 def doit(localheads, remoteheads, remote=remote):
681 if opts.get('old'):
681 if opts.get('old'):
682 if localheads:
682 if localheads:
683 raise error.Abort('cannot use localheads with old style '
683 raise error.Abort('cannot use localheads with old style '
684 'discovery')
684 'discovery')
685 if not util.safehasattr(remote, 'branches'):
685 if not util.safehasattr(remote, 'branches'):
686 # enable in-client legacy support
686 # enable in-client legacy support
687 remote = localrepo.locallegacypeer(remote.local())
687 remote = localrepo.locallegacypeer(remote.local())
688 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
688 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
689 force=True)
689 force=True)
690 common = set(common)
690 common = set(common)
691 if not opts.get('nonheads'):
691 if not opts.get('nonheads'):
692 ui.write(("unpruned common: %s\n") %
692 ui.write(("unpruned common: %s\n") %
693 " ".join(sorted(short(n) for n in common)))
693 " ".join(sorted(short(n) for n in common)))
694 dag = dagutil.revlogdag(repo.changelog)
694 dag = dagutil.revlogdag(repo.changelog)
695 all = dag.ancestorset(dag.internalizeall(common))
695 all = dag.ancestorset(dag.internalizeall(common))
696 common = dag.externalizeall(dag.headsetofconnecteds(all))
696 common = dag.externalizeall(dag.headsetofconnecteds(all))
697 else:
697 else:
698 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
698 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
699 common = set(common)
699 common = set(common)
700 rheads = set(hds)
700 rheads = set(hds)
701 lheads = set(repo.heads())
701 lheads = set(repo.heads())
702 ui.write(("common heads: %s\n") %
702 ui.write(("common heads: %s\n") %
703 " ".join(sorted(short(n) for n in common)))
703 " ".join(sorted(short(n) for n in common)))
704 if lheads <= common:
704 if lheads <= common:
705 ui.write(("local is subset\n"))
705 ui.write(("local is subset\n"))
706 elif rheads <= common:
706 elif rheads <= common:
707 ui.write(("remote is subset\n"))
707 ui.write(("remote is subset\n"))
708
708
709 serverlogs = opts.get('serverlog')
709 serverlogs = opts.get('serverlog')
710 if serverlogs:
710 if serverlogs:
711 for filename in serverlogs:
711 for filename in serverlogs:
712 with open(filename, 'r') as logfile:
712 with open(filename, 'r') as logfile:
713 line = logfile.readline()
713 line = logfile.readline()
714 while line:
714 while line:
715 parts = line.strip().split(';')
715 parts = line.strip().split(';')
716 op = parts[1]
716 op = parts[1]
717 if op == 'cg':
717 if op == 'cg':
718 pass
718 pass
719 elif op == 'cgss':
719 elif op == 'cgss':
720 doit(parts[2].split(' '), parts[3].split(' '))
720 doit(parts[2].split(' '), parts[3].split(' '))
721 elif op == 'unb':
721 elif op == 'unb':
722 doit(parts[3].split(' '), parts[2].split(' '))
722 doit(parts[3].split(' '), parts[2].split(' '))
723 line = logfile.readline()
723 line = logfile.readline()
724 else:
724 else:
725 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
725 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
726 opts.get('remote_head'))
726 opts.get('remote_head'))
727 localrevs = opts.get('local_head')
727 localrevs = opts.get('local_head')
728 doit(localrevs, remoterevs)
728 doit(localrevs, remoterevs)
729
729
730 @command('debugextensions', commands.formatteropts, [], norepo=True)
730 @command('debugextensions', commands.formatteropts, [], norepo=True)
731 def debugextensions(ui, **opts):
731 def debugextensions(ui, **opts):
732 '''show information about active extensions'''
732 '''show information about active extensions'''
733 exts = extensions.extensions(ui)
733 exts = extensions.extensions(ui)
734 hgver = util.version()
734 hgver = util.version()
735 fm = ui.formatter('debugextensions', opts)
735 fm = ui.formatter('debugextensions', opts)
736 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
736 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
737 isinternal = extensions.ismoduleinternal(extmod)
737 isinternal = extensions.ismoduleinternal(extmod)
738 extsource = pycompat.fsencode(extmod.__file__)
738 extsource = pycompat.fsencode(extmod.__file__)
739 if isinternal:
739 if isinternal:
740 exttestedwith = [] # never expose magic string to users
740 exttestedwith = [] # never expose magic string to users
741 else:
741 else:
742 exttestedwith = getattr(extmod, 'testedwith', '').split()
742 exttestedwith = getattr(extmod, 'testedwith', '').split()
743 extbuglink = getattr(extmod, 'buglink', None)
743 extbuglink = getattr(extmod, 'buglink', None)
744
744
745 fm.startitem()
745 fm.startitem()
746
746
747 if ui.quiet or ui.verbose:
747 if ui.quiet or ui.verbose:
748 fm.write('name', '%s\n', extname)
748 fm.write('name', '%s\n', extname)
749 else:
749 else:
750 fm.write('name', '%s', extname)
750 fm.write('name', '%s', extname)
751 if isinternal or hgver in exttestedwith:
751 if isinternal or hgver in exttestedwith:
752 fm.plain('\n')
752 fm.plain('\n')
753 elif not exttestedwith:
753 elif not exttestedwith:
754 fm.plain(_(' (untested!)\n'))
754 fm.plain(_(' (untested!)\n'))
755 else:
755 else:
756 lasttestedversion = exttestedwith[-1]
756 lasttestedversion = exttestedwith[-1]
757 fm.plain(' (%s!)\n' % lasttestedversion)
757 fm.plain(' (%s!)\n' % lasttestedversion)
758
758
759 fm.condwrite(ui.verbose and extsource, 'source',
759 fm.condwrite(ui.verbose and extsource, 'source',
760 _(' location: %s\n'), extsource or "")
760 _(' location: %s\n'), extsource or "")
761
761
762 if ui.verbose:
762 if ui.verbose:
763 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
763 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
764 fm.data(bundled=isinternal)
764 fm.data(bundled=isinternal)
765
765
766 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
766 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
767 _(' tested with: %s\n'),
767 _(' tested with: %s\n'),
768 fm.formatlist(exttestedwith, name='ver'))
768 fm.formatlist(exttestedwith, name='ver'))
769
769
770 fm.condwrite(ui.verbose and extbuglink, 'buglink',
770 fm.condwrite(ui.verbose and extbuglink, 'buglink',
771 _(' bug reporting: %s\n'), extbuglink or "")
771 _(' bug reporting: %s\n'), extbuglink or "")
772
772
773 fm.end()
773 fm.end()
774
774
775 @command('debugfileset',
775 @command('debugfileset',
776 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
776 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
777 _('[-r REV] FILESPEC'))
777 _('[-r REV] FILESPEC'))
778 def debugfileset(ui, repo, expr, **opts):
778 def debugfileset(ui, repo, expr, **opts):
779 '''parse and apply a fileset specification'''
779 '''parse and apply a fileset specification'''
780 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
780 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
781 if ui.verbose:
781 if ui.verbose:
782 tree = fileset.parse(expr)
782 tree = fileset.parse(expr)
783 ui.note(fileset.prettyformat(tree), "\n")
783 ui.note(fileset.prettyformat(tree), "\n")
784
784
785 for f in ctx.getfileset(expr):
785 for f in ctx.getfileset(expr):
786 ui.write("%s\n" % f)
786 ui.write("%s\n" % f)
787
787
788 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
788 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
789 def debugfsinfo(ui, path="."):
789 def debugfsinfo(ui, path="."):
790 """show information detected about current filesystem"""
790 """show information detected about current filesystem"""
791 util.writefile('.debugfsinfo', '')
791 util.writefile('.debugfsinfo', '')
792 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
792 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
793 from . import osutil
793 ui.write(('fstype: %s\n') % (util.getfstype('.') or '(unknown)'))
794 if util.safehasattr(osutil, 'getfstype'):
795 fstype = osutil.getfstype('.')
796 ui.write(('fstype: %s\n') % (fstype or '(unknown)'))
797 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
794 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
798 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
795 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
799 ui.write(('case-sensitive: %s\n') % (util.fscasesensitive('.debugfsinfo')
796 ui.write(('case-sensitive: %s\n') % (util.fscasesensitive('.debugfsinfo')
800 and 'yes' or 'no'))
797 and 'yes' or 'no'))
801 util.tryunlink('.debugfsinfo')
798 util.tryunlink('.debugfsinfo')
802
799
803 @command('debuggetbundle',
800 @command('debuggetbundle',
804 [('H', 'head', [], _('id of head node'), _('ID')),
801 [('H', 'head', [], _('id of head node'), _('ID')),
805 ('C', 'common', [], _('id of common node'), _('ID')),
802 ('C', 'common', [], _('id of common node'), _('ID')),
806 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
803 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
807 _('REPO FILE [-H|-C ID]...'),
804 _('REPO FILE [-H|-C ID]...'),
808 norepo=True)
805 norepo=True)
809 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
806 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
810 """retrieves a bundle from a repo
807 """retrieves a bundle from a repo
811
808
812 Every ID must be a full-length hex node id string. Saves the bundle to the
809 Every ID must be a full-length hex node id string. Saves the bundle to the
813 given file.
810 given file.
814 """
811 """
815 repo = hg.peer(ui, opts, repopath)
812 repo = hg.peer(ui, opts, repopath)
816 if not repo.capable('getbundle'):
813 if not repo.capable('getbundle'):
817 raise error.Abort("getbundle() not supported by target repository")
814 raise error.Abort("getbundle() not supported by target repository")
818 args = {}
815 args = {}
819 if common:
816 if common:
820 args['common'] = [bin(s) for s in common]
817 args['common'] = [bin(s) for s in common]
821 if head:
818 if head:
822 args['heads'] = [bin(s) for s in head]
819 args['heads'] = [bin(s) for s in head]
823 # TODO: get desired bundlecaps from command line.
820 # TODO: get desired bundlecaps from command line.
824 args['bundlecaps'] = None
821 args['bundlecaps'] = None
825 bundle = repo.getbundle('debug', **args)
822 bundle = repo.getbundle('debug', **args)
826
823
827 bundletype = opts.get('type', 'bzip2').lower()
824 bundletype = opts.get('type', 'bzip2').lower()
828 btypes = {'none': 'HG10UN',
825 btypes = {'none': 'HG10UN',
829 'bzip2': 'HG10BZ',
826 'bzip2': 'HG10BZ',
830 'gzip': 'HG10GZ',
827 'gzip': 'HG10GZ',
831 'bundle2': 'HG20'}
828 'bundle2': 'HG20'}
832 bundletype = btypes.get(bundletype)
829 bundletype = btypes.get(bundletype)
833 if bundletype not in bundle2.bundletypes:
830 if bundletype not in bundle2.bundletypes:
834 raise error.Abort(_('unknown bundle type specified with --type'))
831 raise error.Abort(_('unknown bundle type specified with --type'))
835 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
832 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
836
833
837 @command('debugignore', [], '[FILE]')
834 @command('debugignore', [], '[FILE]')
838 def debugignore(ui, repo, *files, **opts):
835 def debugignore(ui, repo, *files, **opts):
839 """display the combined ignore pattern and information about ignored files
836 """display the combined ignore pattern and information about ignored files
840
837
841 With no argument display the combined ignore pattern.
838 With no argument display the combined ignore pattern.
842
839
843 Given space separated file names, shows if the given file is ignored and
840 Given space separated file names, shows if the given file is ignored and
844 if so, show the ignore rule (file and line number) that matched it.
841 if so, show the ignore rule (file and line number) that matched it.
845 """
842 """
846 ignore = repo.dirstate._ignore
843 ignore = repo.dirstate._ignore
847 if not files:
844 if not files:
848 # Show all the patterns
845 # Show all the patterns
849 includepat = getattr(ignore, 'includepat', None)
846 includepat = getattr(ignore, 'includepat', None)
850 if includepat is not None:
847 if includepat is not None:
851 ui.write("%s\n" % includepat)
848 ui.write("%s\n" % includepat)
852 else:
849 else:
853 raise error.Abort(_("no ignore patterns found"))
850 raise error.Abort(_("no ignore patterns found"))
854 else:
851 else:
855 for f in files:
852 for f in files:
856 nf = util.normpath(f)
853 nf = util.normpath(f)
857 ignored = None
854 ignored = None
858 ignoredata = None
855 ignoredata = None
859 if nf != '.':
856 if nf != '.':
860 if ignore(nf):
857 if ignore(nf):
861 ignored = nf
858 ignored = nf
862 ignoredata = repo.dirstate._ignorefileandline(nf)
859 ignoredata = repo.dirstate._ignorefileandline(nf)
863 else:
860 else:
864 for p in util.finddirs(nf):
861 for p in util.finddirs(nf):
865 if ignore(p):
862 if ignore(p):
866 ignored = p
863 ignored = p
867 ignoredata = repo.dirstate._ignorefileandline(p)
864 ignoredata = repo.dirstate._ignorefileandline(p)
868 break
865 break
869 if ignored:
866 if ignored:
870 if ignored == nf:
867 if ignored == nf:
871 ui.write(_("%s is ignored\n") % f)
868 ui.write(_("%s is ignored\n") % f)
872 else:
869 else:
873 ui.write(_("%s is ignored because of "
870 ui.write(_("%s is ignored because of "
874 "containing folder %s\n")
871 "containing folder %s\n")
875 % (f, ignored))
872 % (f, ignored))
876 ignorefile, lineno, line = ignoredata
873 ignorefile, lineno, line = ignoredata
877 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
874 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
878 % (ignorefile, lineno, line))
875 % (ignorefile, lineno, line))
879 else:
876 else:
880 ui.write(_("%s is not ignored\n") % f)
877 ui.write(_("%s is not ignored\n") % f)
881
878
@command('debugindex', commands.debugrevlogopts +
    [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
    _('[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True)
def debugindex(ui, repo, file_=None, **opts):
    """dump the contents of an index file"""
    r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise error.Abort(_("unknown format %d") % format)

    generaldelta = r.version & revlog.REVLOGGENERALDELTA
    if generaldelta:
        basehdr = ' delta'
    else:
        basehdr = ' base'

    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    if format == 0:
        ui.write((" rev offset length " + basehdr + " linkrev"
                  " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
    elif format == 1:
        ui.write((" rev flag offset length"
                  " size " + basehdr + " link p1 p2"
                  " %s\n") % "nodeid".rjust(idlen))

    for i in r:
        node = r.node(i)
        if generaldelta:
            base = r.deltaparent(i)
        else:
            base = r.chainbase(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                pp = [nullid, nullid]
            ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                i, r.start(i), r.length(i), base, r.linkrev(i),
                shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
                i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                base, r.linkrev(i), pr[0], pr[1], shortfn(node)))

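# For orientation (illustrative, placeholder values): format 0 prints one row
# per revision under the header written above, roughly
#     0         0       120      0       0 <nodeid> <p1> <p2>
# while format 1 swaps the parent node ids for parent revision numbers and
# adds the flag and size columns.
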
@command('debugindexdot', commands.debugrevlogopts,
    _('-c|-m|FILE'), optionalrepo=True)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
    ui.write(("digraph G {\n"))
    for i in r:
        node = r.node(i)
        pp = r.parents(node)
        ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
        if pp[1] != nullid:
            ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
    ui.write("}\n")

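# The output is plain graphviz; for a hypothetical two-revision linear history
# it would look roughly like this (-1 denotes the null revision):
#   digraph G {
#           -1 -> 0
#           0 -> 1
#   }
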
@command('debuginstall', [] + commands.formatteropts, '', norepo=True)
def debuginstall(ui, **opts):
    '''test Mercurial installation

    Returns 0 on success.
    '''

    def writetemp(contents):
        (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, pycompat.sysstr("wb"))
        f.write(contents)
        f.close()
        return name

    problems = 0

    fm = ui.formatter('debuginstall', opts)
    fm.startitem()

    # encoding
    fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
    err = None
    try:
        encoding.fromlocal("test")
    except error.Abort as inst:
        err = inst
        problems += 1
    fm.condwrite(err, 'encodingerror', _(" %s\n"
                 " (check that your locale is properly set)\n"), err)

    # Python
    fm.write('pythonexe', _("checking Python executable (%s)\n"),
             pycompat.sysexecutable)
    fm.write('pythonver', _("checking Python version (%s)\n"),
             ("%d.%d.%d" % sys.version_info[:3]))
    fm.write('pythonlib', _("checking Python lib (%s)...\n"),
             os.path.dirname(pycompat.fsencode(os.__file__)))

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add('sni')

    fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
             fm.formatlist(sorted(security), name='protocol',
                           fmt='%s', sep=','))

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if 'tls1.2' not in security:
        fm.plain(_(' TLS 1.2 not supported by Python install; '
                   'network connections lack modern security\n'))
    if 'sni' not in security:
        fm.plain(_(' SNI not supported by Python install; may have '
                   'connectivity issues with some servers\n'))

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write('hgver', _("checking Mercurial version (%s)\n"),
             hgver.split('+')[0])
    fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
             '+'.join(hgver.split('+')[1:]))

    # compiled modules
    fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
             policy.policy)
    fm.write('hgmodules', _("checking installed modules (%s)...\n"),
             os.path.dirname(pycompat.fsencode(__file__)))

    err = None
    try:
        from . import (
            base85,
            bdiff,
            mpatch,
            osutil,
        )
        dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
    except Exception as inst:
        err = inst
        problems += 1
    fm.condwrite(err, 'extensionserror', " %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write('compengines', _('checking registered compression engines (%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines),
                           name='compengine', fmt='%s', sep=', '))
    fm.write('compenginesavail', _('checking available compression engines '
                                   '(%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines
                                  if e.available()),
                           name='compengine', fmt='%s', sep=', '))
    wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
    fm.write('compenginesserver', _('checking available compression engines '
                                    'for wire protocol (%s)\n'),
             fm.formatlist([e.name() for e in wirecompengines
                            if e.wireprotosupport()],
                           name='compengine', fmt='%s', sep=', '))

    # templates
    p = templater.templatepaths()
    fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
    fm.condwrite(not p, '', _(" no template directories found\n"))
    if p:
        m = templater.templatepath("map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = inst
                p = None
            fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
        else:
            p = None
        fm.condwrite(p, 'defaulttemplate',
                     _("checking default template (%s)\n"), m)
        fm.condwrite(not m, 'defaulttemplatenotfound',
                     _(" template '%s' not found\n"), "default")
    if not p:
        problems += 1
    fm.condwrite(not p, '',
                 _(" (templates seem to have been installed incorrectly)\n"))

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    fm.write('editor', _("checking commit editor... (%s)\n"), editor)
    cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
    fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
                 _(" No commit editor set and can't find %s in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor == 'vi' and editor)
    fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
                 _(" Can't find editor '%s' in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor)
    if not cmdpath and editor != 'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e
        problems += 1

    fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
    fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
                 " (specify a username in your configuration file)\n"), err)

    fm.condwrite(not problems, '',
                 _("no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(problems, 'problems',
                 _("%d problems detected,"
                   " please check your install!\n"), problems)
    fm.end()

    return problems

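# A healthy installation prints one "checking ..." line per probe above and
# finishes with "no problems detected"; each failed probe bumps `problems`,
# which is also the command's return value. Roughly (paths are hypothetical):
#   checking encoding (UTF-8)...
#   checking Python executable (/usr/bin/python)
#   ...
#   no problems detected
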
@command('debugknown', [], _('REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('known'):
        raise error.Abort("known() not supported by target repository")
    flags = repo.known([bin(s) for s in ids])
    ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))

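# Example (hypothetical URL and ids): querying a peer about two full-length
# node ids, where the first is known and the second is not, prints one digit
# per id, in order:
#   hg debugknown http://example.com/repo <40-hex-id> <40-hex-id>
#   10
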
@command('debuglabelcomplete', [], _('LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    debugnamecomplete(ui, repo, *args)

@command('debuglocks',
         [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
          ('W', 'force-wlock', None,
           _('free the working state lock (DANGEROUS)'))],
         _('[OPTION]...'))
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Returns 0 if no locks are held.

    """

    if opts.get('force_lock'):
        repo.svfs.unlink('lock')
    if opts.get('force_wlock'):
        repo.vfs.unlink('wlock')
    if opts.get('force_lock') or opts.get('force_wlock'):
        return 0

    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            l.release()
        else:
            try:
                stat = vfs.lstat(name)
                age = now - stat.st_mtime
                user = util.username(stat.st_uid)
                locker = vfs.readlock(name)
                if ":" in locker:
                    host, pid = locker.split(':')
                    if host == socket.gethostname():
                        locker = 'user %s, process %s' % (user, pid)
                    else:
                        locker = 'user %s, process %s, host %s' \
                                 % (user, pid, host)
                ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
                return 1
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise

        ui.write(("%-6s free\n") % (name + ":"))
        return 0

    held += report(repo.svfs, "lock", repo.lock)
    held += report(repo.vfs, "wlock", repo.wlock)

    return held

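# Sample report (user, pid and host are hypothetical), following the two
# ui.write() formats in report() above:
#   lock:  user alice, process 12345, host build01.example.com (163s)
#   wlock: free
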
@command('debugmergestate', [], '')
def debugmergestate(ui, repo, *args):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""
    def _hashornull(h):
        if h == nullhex:
            return 'null'
        else:
            return h

    def printrecords(version):
        ui.write(('* version %s records\n') % version)
        if version == 1:
            records = v1records
        else:
            records = v2records

        for rtype, record in records:
            # pretty print some record types
            if rtype == 'L':
                ui.write(('local: %s\n') % record)
            elif rtype == 'O':
                ui.write(('other: %s\n') % record)
            elif rtype == 'm':
                driver, mdstate = record.split('\0', 1)
                ui.write(('merge driver: %s (state "%s")\n')
                         % (driver, mdstate))
            elif rtype in 'FDC':
                r = record.split('\0')
                f, state, hash, lfile, afile, anode, ofile = r[0:7]
                if version == 1:
                    onode = 'not stored in v1 format'
                    flags = r[7]
                else:
                    onode, flags = r[7:9]
                ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
                         % (f, rtype, state, _hashornull(hash)))
                ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
                ui.write((' ancestor path: %s (node %s)\n')
                         % (afile, _hashornull(anode)))
                ui.write((' other path: %s (node %s)\n')
                         % (ofile, _hashornull(onode)))
            elif rtype == 'f':
                filename, rawextras = record.split('\0', 1)
                extras = rawextras.split('\0')
                i = 0
                extrastrings = []
                while i < len(extras):
                    extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
                    i += 2

                ui.write(('file extras: %s (%s)\n')
                         % (filename, ', '.join(extrastrings)))
            elif rtype == 'l':
                labels = record.split('\0', 2)
                labels = [l for l in labels if len(l) > 0]
                ui.write(('labels:\n'))
                ui.write((' local: %s\n' % labels[0]))
                ui.write((' other: %s\n' % labels[1]))
                if len(labels) > 2:
                    ui.write((' base: %s\n' % labels[2]))
            else:
                ui.write(('unrecognized entry: %s\t%s\n')
                         % (rtype, record.replace('\0', '\t')))

    # Avoid mergestate.read() since it may raise an exception for unsupported
    # merge state records. We shouldn't be doing this, but this is OK since this
    # command is pretty low-level.
    ms = mergemod.mergestate(repo)

    # sort so that reasonable information is on top
    v1records = ms._readrecordsv1()
    v2records = ms._readrecordsv2()
    order = 'LOml'
    def key(r):
        idx = order.find(r[0])
        if idx == -1:
            return (1, r[1])
        else:
            return (0, idx)
    v1records.sort(key=key)
    v2records.sort(key=key)

    if not v1records and not v2records:
        ui.write(('no merge state found\n'))
    elif not v2records:
        ui.note(('no version 2 merge state\n'))
        printrecords(1)
    elif ms._v1v2match(v1records, v2records):
        ui.note(('v1 and v2 states match: using v2\n'))
        printrecords(2)
    else:
        ui.note(('v1 and v2 states mismatch: using v1\n'))
        printrecords(1)
        if ui.verbose:
            printrecords(2)

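# Typical output for an interrupted merge, as produced by printrecords()
# above (node hashes replaced by placeholders, file name hypothetical):
#   * version 2 records
#   local: <node>
#   other: <node>
#   file: foo.txt (record type "F", state "u", hash <node>)
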
@command('debugnamecomplete', [], _('NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    names = set()
    # since we previously only listed open branches, we will handle that
    # specially (after this for loop)
    for name, ns in repo.names.iteritems():
        if name != 'branches':
            names.update(ns.listnames(repo))
    names.update(tag for (tag, heads, tip, closed)
                 in repo.branchmap().iterbranches() if not closed)
    completions = set()
    if not args:
        args = ['']
    for a in args:
        completions.update(n for n in names if n.startswith(a))
    ui.write('\n'.join(sorted(completions)))
    ui.write('\n')

@command('debugobsolete',
         [('', 'flags', 0, _('markers flag')),
          ('', 'record-parents', False,
           _('record parent information for the precursor')),
          ('r', 'rev', [], _('display markers relevant to REV')),
          ('', 'index', False, _('display index of the marker')),
          ('', 'delete', [], _('delete markers specified by indices')),
         ] + commands.commitopts2 + commands.formatteropts,
         _('[OBSOLETED [REPLACEMENT ...]]'))
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    def parsenodeid(s):
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.Abort('changeset references must be full hexadecimal '
                              'node identifiers')

    if opts.get('delete'):
        indices = []
        for v in opts.get('delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.Abort(_('invalid index value: %r') % v,
                                  hint=_('use integers for indices'))

        if repo.currenttransaction():
            raise error.Abort(_('cannot delete obsmarkers in the middle '
                                'of transaction.'))

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_('deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        if opts['rev']:
            raise error.Abort('cannot select revision when creating marker')
        metadata = {}
        metadata['user'] = opts['user'] or ui.username()
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction('debugobsolete')
            try:
                date = opts.get('date')
                if date:
                    date = util.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts['record_parents']:
                    if prec not in repo.unfiltered():
                        raise error.Abort('cannot use --record-parents on '
                                          'unknown changesets')
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(tr, prec, succs, opts['flags'],
                                     parents=parents, date=date,
                                     metadata=metadata)
                tr.close()
            except ValueError as exc:
                raise error.Abort(_('bad obsmarker input: %s') % exc)
            finally:
                tr.release()
        finally:
            l.release()
    else:
        if opts['rev']:
            revs = scmutil.revrange(repo, opts['rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(obsolete.getmarkers(repo, nodes=nodes))
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsolete.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get('rev') and opts.get('index'):
            markerstoiter = obsolete.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter('debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get('index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()

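# Typical uses (node ids are placeholders): record that OLD was rewritten into
# NEW, or list the markers relevant to the working copy parent along with
# their indices (which --delete then accepts):
#   hg debugobsolete <old-full-hex-node> <new-full-hex-node>
#   hg debugobsolete --rev . --index
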
@command('debugpathcomplete',
         [('f', 'full', None, _('complete an entire path')),
          ('n', 'normal', None, _('show only normal files')),
          ('a', 'added', None, _('show only added files')),
          ('r', 'removed', None, _('show only removed files'))],
         _('FILESPEC...'))
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += '/'
        spec = spec[len(rootdir):]
        fixpaths = pycompat.ossep != '/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, '/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.iteritems():
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace('/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    acceptable = ''
    if opts['normal']:
        acceptable += 'nm'
    if opts['added']:
        acceptable += 'a'
    if opts['removed']:
        acceptable += 'r'
    cwd = repo.getcwd()
    if not specs:
        specs = ['.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or 'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write('\n')

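# Illustrative sketch (hypothetical helper, not used by the command above):
# without --full, complete() truncates every match at the next path separator
# past the typed prefix, so "sr" completes to the directory "src" instead of
# listing every file under it. The same rule with plain strings and '/':
def _example_nextsegment(candidate, prefix):
    # cut the candidate at the first '/' after the prefix, if there is one
    s = candidate.find('/', len(prefix))
    return candidate[:s] if s >= 0 else candidate

# e.g. _example_nextsegment('src/main.py', 'sr') == 'src'
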
@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    target = hg.peer(ui, {}, repopath)
    if keyinfo:
        key, old, new = keyinfo
        r = target.pushkey(namespace, key, old, new)
        ui.status(str(r) + '\n')
        return not r
    else:
        for k, v in sorted(target.listkeys(namespace).iteritems()):
            ui.write("%s\t%s\n" % (util.escapestr(k),
                                   util.escapestr(v)))

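# Example (repo path and key names are hypothetical): with two arguments the
# command lists a namespace, one "key<TAB>value" line per entry; with five it
# attempts the conditional update and prints the peer's answer:
#   hg debugpushkey /path/to/repo bookmarks
#   hg debugpushkey /path/to/repo bookmarks mybook <old-node> <new-node>
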
@command('debugpvec', [], _('A B'))
def debugpvec(ui, repo, a, b=None):
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = "="
    elif pa > pb:
        rel = ">"
    elif pa < pb:
        rel = "<"
    elif pa | pb:
        rel = "|"
    ui.write(_("a: %s\n") % pa)
    ui.write(_("b: %s\n") % pb)
    ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
             (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
              pa.distance(pb), rel))

@command('debugrebuilddirstate|debugrebuildstate',
         [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
          ('', 'minimal', None, _('only rebuild files that are inconsistent with '
                                  'the working copy parent')),
         ],
         _('[-r REV]'))
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            manifestonly = manifestfiles - dirstatefiles
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)

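# Illustrative sketch (hypothetical helper, not used by the command above):
# with --minimal, the rebuild is restricted to files present on only one side
# of the manifest/dirstate pair, leaving locally added ('a') entries alone.
# The same set arithmetic with plain Python sets:
def _example_minimal_changes(manifestfiles, dirstatefiles, addedfiles):
    manifestonly = manifestfiles - dirstatefiles  # in the manifest, not the dirstate
    dsonly = dirstatefiles - manifestfiles        # in the dirstate, not the manifest
    return manifestonly | (dsonly - addedfiles)   # skip explicitly added files

# e.g. _example_minimal_changes({'a', 'b'}, {'b', 'c', 'd'}, {'d'}) == {'a', 'c'}
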
@command('debugrebuildfncache', [], '')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    repair.rebuildfncache(ui, repo)

@command('debugrename',
         [('r', 'rev', '', _('revision to debug'), _('REV'))],
         _('[-r REV] FILE'))
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    ctx = scmutil.revsingle(repo, opts.get('rev'))
    m = scmutil.match(ctx, (file1,) + pats, opts)
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        o = fctx.filelog().renamed(fctx.filenode())
        rel = m.rel(abs)
        if o:
            ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
        else:
            ui.write(_("%s not renamed\n") % rel)

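# Output is one line per matched file, using the two formats above
# (file names and the node shown here are placeholders):
#   copied.txt renamed from original.txt:<filelog-node>
#   other.txt not renamed
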
1605 @command('debugrevlog', commands.debugrevlogopts +
1602 @command('debugrevlog', commands.debugrevlogopts +
1606 [('d', 'dump', False, _('dump index data'))],
1603 [('d', 'dump', False, _('dump index data'))],
1607 _('-c|-m|FILE'),
1604 _('-c|-m|FILE'),
1608 optionalrepo=True)
1605 optionalrepo=True)
1609 def debugrevlog(ui, repo, file_=None, **opts):
1606 def debugrevlog(ui, repo, file_=None, **opts):
1610 """show data and statistics about a revlog"""
1607 """show data and statistics about a revlog"""
1611 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1608 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1612
1609
1613 if opts.get("dump"):
1610 if opts.get("dump"):
1614 numrevs = len(r)
1611 numrevs = len(r)
1615 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1612 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1616 " rawsize totalsize compression heads chainlen\n"))
1613 " rawsize totalsize compression heads chainlen\n"))
1617 ts = 0
1614 ts = 0
1618 heads = set()
1615 heads = set()
1619
1616
1620 for rev in xrange(numrevs):
1617 for rev in xrange(numrevs):
1621 dbase = r.deltaparent(rev)
1618 dbase = r.deltaparent(rev)
1622 if dbase == -1:
1619 if dbase == -1:
1623 dbase = rev
1620 dbase = rev
1624 cbase = r.chainbase(rev)
1621 cbase = r.chainbase(rev)
1625 clen = r.chainlen(rev)
1622 clen = r.chainlen(rev)
1626 p1, p2 = r.parentrevs(rev)
1623 p1, p2 = r.parentrevs(rev)
1627 rs = r.rawsize(rev)
1624 rs = r.rawsize(rev)
1628 ts = ts + rs
1625 ts = ts + rs
1629 heads -= set(r.parentrevs(rev))
1626 heads -= set(r.parentrevs(rev))
1630 heads.add(rev)
1627 heads.add(rev)
1631 try:
1628 try:
1632 compression = ts / r.end(rev)
1629 compression = ts / r.end(rev)
1633 except ZeroDivisionError:
1630 except ZeroDivisionError:
1634 compression = 0
1631 compression = 0
1635 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1632 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1636 "%11d %5d %8d\n" %
1633 "%11d %5d %8d\n" %
1637 (rev, p1, p2, r.start(rev), r.end(rev),
1634 (rev, p1, p2, r.start(rev), r.end(rev),
1638 r.start(dbase), r.start(cbase),
1635 r.start(dbase), r.start(cbase),
1639 r.start(p1), r.start(p2),
1636 r.start(p1), r.start(p2),
1640 rs, ts, compression, len(heads), clen))
1637 rs, ts, compression, len(heads), clen))
1641 return 0
1638 return 0
1642
1639
1643 v = r.version
1640 v = r.version
1644 format = v & 0xFFFF
1641 format = v & 0xFFFF
1645 flags = []
1642 flags = []
1646 gdelta = False
1643 gdelta = False
1647 if v & revlog.REVLOGNGINLINEDATA:
1644 if v & revlog.REVLOGNGINLINEDATA:
1648 flags.append('inline')
1645 flags.append('inline')
1649 if v & revlog.REVLOGGENERALDELTA:
1646 if v & revlog.REVLOGGENERALDELTA:
1650 gdelta = True
1647 gdelta = True
1651 flags.append('generaldelta')
1648 flags.append('generaldelta')
1652 if not flags:
1649 if not flags:
1653 flags = ['(none)']
1650 flags = ['(none)']
1654
1651
1655 nummerges = 0
1652 nummerges = 0
1656 numfull = 0
1653 numfull = 0
1657 numprev = 0
1654 numprev = 0
1658 nump1 = 0
1655 nump1 = 0
1659 nump2 = 0
1656 nump2 = 0
1660 numother = 0
1657 numother = 0
1661 nump1prev = 0
1658 nump1prev = 0
1662 nump2prev = 0
1659 nump2prev = 0
1663 chainlengths = []
1660 chainlengths = []
1664
1661
1665 datasize = [None, 0, 0]
1662 datasize = [None, 0, 0]
1666 fullsize = [None, 0, 0]
1663 fullsize = [None, 0, 0]
1667 deltasize = [None, 0, 0]
1664 deltasize = [None, 0, 0]
1668 chunktypecounts = {}
1665 chunktypecounts = {}
1669 chunktypesizes = {}
1666 chunktypesizes = {}
1670
1667
1671 def addsize(size, l):
1668 def addsize(size, l):
1672 if l[0] is None or size < l[0]:
1669 if l[0] is None or size < l[0]:
1673 l[0] = size
1670 l[0] = size
1674 if size > l[1]:
1671 if size > l[1]:
1675 l[1] = size
1672 l[1] = size
1676 l[2] += size
1673 l[2] += size
1677
1674
1678 numrevs = len(r)
1675 numrevs = len(r)
1679 for rev in xrange(numrevs):
1676 for rev in xrange(numrevs):
1680 p1, p2 = r.parentrevs(rev)
1677 p1, p2 = r.parentrevs(rev)
1681 delta = r.deltaparent(rev)
1678 delta = r.deltaparent(rev)
1682 if format > 0:
1679 if format > 0:
1683 addsize(r.rawsize(rev), datasize)
1680 addsize(r.rawsize(rev), datasize)
1684 if p2 != nullrev:
1681 if p2 != nullrev:
1685 nummerges += 1
1682 nummerges += 1
1686 size = r.length(rev)
1683 size = r.length(rev)
1687 if delta == nullrev:
1684 if delta == nullrev:
1688 chainlengths.append(0)
1685 chainlengths.append(0)
1689 numfull += 1
1686 numfull += 1
1690 addsize(size, fullsize)
1687 addsize(size, fullsize)
1691 else:
1688 else:
1692 chainlengths.append(chainlengths[delta] + 1)
1689 chainlengths.append(chainlengths[delta] + 1)
1693 addsize(size, deltasize)
1690 addsize(size, deltasize)
1694 if delta == rev - 1:
1691 if delta == rev - 1:
1695 numprev += 1
1692 numprev += 1
1696 if delta == p1:
1693 if delta == p1:
1697 nump1prev += 1
1694 nump1prev += 1
1698 elif delta == p2:
1695 elif delta == p2:
1699 nump2prev += 1
1696 nump2prev += 1
1700 elif delta == p1:
1697 elif delta == p1:
1701 nump1 += 1
1698 nump1 += 1
1702 elif delta == p2:
1699 elif delta == p2:
1703 nump2 += 1
1700 nump2 += 1
1704 elif delta != nullrev:
1701 elif delta != nullrev:
1705 numother += 1
1702 numother += 1
1706
1703
1707 # Obtain data on the raw chunks in the revlog.
1704 # Obtain data on the raw chunks in the revlog.
1708 chunk = r._chunkraw(rev, rev)[1]
1705 chunk = r._chunkraw(rev, rev)[1]
1709 if chunk:
1706 if chunk:
1710 chunktype = chunk[0]
1707 chunktype = chunk[0]
1711 else:
1708 else:
1712 chunktype = 'empty'
1709 chunktype = 'empty'
1713
1710
1714 if chunktype not in chunktypecounts:
1711 if chunktype not in chunktypecounts:
1715 chunktypecounts[chunktype] = 0
1712 chunktypecounts[chunktype] = 0
1716 chunktypesizes[chunktype] = 0
1713 chunktypesizes[chunktype] = 0
1717
1714
1718 chunktypecounts[chunktype] += 1
1715 chunktypecounts[chunktype] += 1
1719 chunktypesizes[chunktype] += size
1716 chunktypesizes[chunktype] += size
1720
1717
1721 # Adjust size min value for empty cases
1718 # Adjust size min value for empty cases
1722 for size in (datasize, fullsize, deltasize):
1719 for size in (datasize, fullsize, deltasize):
1723 if size[0] is None:
1720 if size[0] is None:
1724 size[0] = 0
1721 size[0] = 0
1725
1722
1726 numdeltas = numrevs - numfull
1723 numdeltas = numrevs - numfull
1727 numoprev = numprev - nump1prev - nump2prev
1724 numoprev = numprev - nump1prev - nump2prev
1728 totalrawsize = datasize[2]
1725 totalrawsize = datasize[2]
1729 datasize[2] /= numrevs
1726 datasize[2] /= numrevs
1730 fulltotal = fullsize[2]
1727 fulltotal = fullsize[2]
1731 fullsize[2] /= numfull
1728 fullsize[2] /= numfull
1732 deltatotal = deltasize[2]
1729 deltatotal = deltasize[2]
1733 if numrevs - numfull > 0:
1730 if numrevs - numfull > 0:
1734 deltasize[2] /= numrevs - numfull
1731 deltasize[2] /= numrevs - numfull
1735 totalsize = fulltotal + deltatotal
1732 totalsize = fulltotal + deltatotal
1736 avgchainlen = sum(chainlengths) / numrevs
1733 avgchainlen = sum(chainlengths) / numrevs
1737 maxchainlen = max(chainlengths)
1734 maxchainlen = max(chainlengths)
1738 compratio = 1
1735 compratio = 1
1739 if totalsize:
1736 if totalsize:
1740 compratio = totalrawsize / totalsize
1737 compratio = totalrawsize / totalsize
1741
1738
1742 basedfmtstr = '%%%dd\n'
1739 basedfmtstr = '%%%dd\n'
1743 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
1740 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
1744
1741
    def dfmtstr(max):
        return basedfmtstr % len(str(max))
    def pcfmtstr(max, padding=0):
        return basepcfmtstr % (len(str(max)), ' ' * padding)

    def pcfmt(value, total):
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.write(('format : %d\n') % format)
    ui.write(('flags : %s\n') % ', '.join(flags))

    ui.write('\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
    ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
    ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
    ui.write(('revision size : ') + fmt2 % totalsize)
    ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
    ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))

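    # Each raw revlog chunk starts with a byte that identifies how it is
    # stored (commonly 'u' for uncompressed data or 'x' for zlib-compressed
    # data); fmtchunktype renders that byte as a label for the breakdown
    # printed below.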
    def fmtchunktype(chunktype):
        if chunktype == 'empty':
            return ' %s : ' % chunktype
        elif chunktype in string.ascii_letters:
            return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return ' 0x%s : ' % hex(chunktype)

    ui.write('\n')
    ui.write(('chunks : ') + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.write(('chunks size : ') + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write('\n')
    fmt = dfmtstr(max(avgchainlen, compratio))
    ui.write(('avg chain length : ') + fmt % avgchainlen)
    ui.write(('max chain length : ') + fmt % maxchainlen)
    ui.write(('compression ratio : ') + fmt % compratio)

    if format > 0:
        ui.write('\n')
        ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
                 % tuple(datasize))
        ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
                 % tuple(fullsize))
        ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
                 % tuple(deltasize))

    if numdeltas > 0:
        ui.write('\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
        if numprev > 0:
            ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
                                                            numprev))
            ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
                                                            numprev))
            ui.write((' other : ') + fmt2 % pcfmt(numoprev,
                                                  numprev))
        if gdelta:
            ui.write(('deltas against p1 : ')
                     + fmt % pcfmt(nump1, numdeltas))
            ui.write(('deltas against p2 : ')
                     + fmt % pcfmt(nump2, numdeltas))
            ui.write(('deltas against other : ') + fmt % pcfmt(numother,
                                                               numdeltas))

@command('debugrevspec',
         [('', 'optimize', None,
           _('print parsed tree after optimizing (DEPRECATED)')),
          ('p', 'show-stage', [],
           _('print parsed tree at the given stage'), _('NAME')),
          ('', 'no-optimized', False, _('evaluate tree without optimization')),
          ('', 'verify-optimized', False, _('verify optimized result')),
          ],
         ('REVSPEC'))
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use the -p/--show-stage option to print the parsed tree at the given
    stages. Use -p all to print the tree at every stage.

    Use --verify-optimized to compare the optimized result with the
    unoptimized one. Returns 1 if the optimized result differs.
    """
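    # The expression is pushed through the stages listed below, in order.
    # For example, running:
    #   hg debugrevspec -p all "::."
    # prints the parsed tree after every stage before listing the revisions.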
    stages = [
        ('parsed', lambda tree: tree),
        ('expanded', lambda tree: revsetlang.expandaliases(ui, tree)),
        ('concatenated', revsetlang.foldconcat),
        ('analyzed', revsetlang.analyze),
        ('optimized', revsetlang.optimize),
    ]
    if opts['no_optimized']:
        stages = stages[:-1]
    if opts['verify_optimized'] and opts['no_optimized']:
        raise error.Abort(_('cannot use --verify-optimized with '
                            '--no-optimized'))
    stagenames = set(n for n, f in stages)

    showalways = set()
    showchanged = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
        showchanged.update(['expanded', 'concatenated'])
    if opts['optimize']:
        showalways.add('optimized')
    if opts['show_stage'] and opts['optimize']:
        raise error.Abort(_('cannot use --optimize with --show-stage'))
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=repo.__contains__)
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(revsetlang.prettyformat(tree), "\n")
            printedtree = tree

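    # With --verify-optimized, evaluate both the analyzed and the optimized
    # trees and show any difference between the resulting revision lists as
    # a diff-style listing.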
    if opts['verify_optimized']:
        arevs = revset.makematcher(treebystage['analyzed'])(repo)
        brevs = revset.makematcher(treebystage['optimized'])(repo)
        if ui.verbose:
            ui.note(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
            ui.note(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        ui.write(('--- analyzed\n'), label='diff.file_a')
        ui.write(('+++ optimized\n'), label='diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write('-%s\n' % c, label='diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write('+%s\n' % c, label='diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(' %s\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if ui.verbose:
        ui.note(("* set:\n"), smartset.prettyformat(revs), "\n")
    for c in revs:
        ui.write("%s\n" % c)

@command('debugsetparents', [], _('REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """

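    # When REV2 is omitted it defaults to the null revision, leaving the
    # working directory with a single parent.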
    r1 = scmutil.revsingle(repo, rev1).node()
    r2 = scmutil.revsingle(repo, rev2, 'null').node()

    with repo.wlock():
        repo.setparents(r1, r2)

@command('debugsub',
         [('r', 'rev', '',
           _('revision to check'), _('REV'))],
         _('[-r REV] [REV]'))
def debugsub(ui, repo, rev=None):
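    # Print each subrepository recorded for the revision along with its
    # source URL and the revision it is pinned to.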
    ctx = scmutil.revsingle(repo, rev, None)
    for k, v in sorted(ctx.substate.items()):
        ui.write(('path %s\n') % k)
        ui.write((' source %s\n') % v[0])
        ui.write((' revision %s\n') % v[1])

@command('debugsuccessorssets',
         [],
         _('[REV]'))
def debugsuccessorssets(ui, repo, *revs):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only.

    In most cases a changeset A has a single successors set containing a
    single successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors is called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
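    # Full 40-character hashes are printed under --debug; otherwise the
    # short forms are used (see ctx2str/node2str below).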
    # passed to successorssets caching computation from one call to another
    cache = {}
    ctx2str = str
    node2str = short
    if ui.debug():
        def ctx2str(ctx):
            return ctx.hex()
        node2str = hex
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write('%s\n'% ctx2str(ctx))
        for succsset in obsolete.successorssets(repo, ctx.node(), cache):
            if succsset:
                ui.write(' ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(' ')
                    ui.write(node2str(node))
            ui.write('\n')

@command('debugtemplate',
         [('r', 'rev', [], _('apply template on changesets'), _('REV')),
          ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
         _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
         optionalrepo=True)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        if repo is None:
            raise error.RepoError(_('there is no Mercurial repository here '
                                    '(.hg not found)'))
        revs = scmutil.revrange(repo, opts['rev'])

    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split('=', 1))
            if not k or k == 'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_('malformed keyword definition: %s') % d)

    if ui.verbose:
        aliases = ui.configitems('templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), '\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')

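    # Without --rev the template is rendered once as a generic template;
    # with --rev it is applied to each of the requested changesets, much
    # like a log template.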
    mapfile = None
    if revs is None:
        k = 'debugtemplate'
        t = formatter.maketemplater(ui, k, tmpl)
        ui.write(templater.stringify(t(k, ui=ui, **props)))
    else:
        displayer = cmdutil.changeset_templater(ui, repo, None, opts, tmpl,
                                                mapfile, buffered=False)
        for r in revs:
            displayer.show(repo[r], **props)
        displayer.close()

@command('debugupgraderepo', [
    ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
    ('', 'run', False, _('performs an upgrade')),
    ])
def debugupgraderepo(ui, repo, run=False, optimize=None):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines,
    this should complete almost instantaneously and the chances of a consumer
    being unable to access the repository should be low.
    """
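    # Both the analysis and the upgrade itself are delegated to
    # repair.upgraderepo.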
    return repair.upgraderepo(ui, repo, run=run, optimize=optimize)

@command('debugwalk', commands.walkopts, _('[OPTION]... [FILE]...'),
         inferrepo=True)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
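    # One line is printed per matched file: a leading 'f', the repo-relative
    # path, the path relative to the current directory, and 'exact' when the
    # file was named explicitly rather than matched by a pattern.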
    m = scmutil.match(repo[None], pats, opts)
    items = list(repo.walk(m))
    if not items:
        return
    f = lambda fn: fn
    if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
        f = lambda fn: util.normpath(fn)
    fmt = 'f %%-%ds %%-%ds %%s' % (
        max([len(abs) for abs in items]),
        max([len(m.rel(abs)) for abs in items]))
    for abs in items:
        line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
        ui.write("%s\n" % line.rstrip())

@command('debugwireargs',
         [('', 'three', '', 'three'),
          ('', 'four', '', 'four'),
          ('', 'five', '', 'five'),
          ] + commands.remoteopts,
         _('REPO [OPTIONS]... [ONE [TWO]]'),
         norepo=True)
def debugwireargs(ui, repopath, *vals, **opts):
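    # Exercise argument passing over the wire protocol by forwarding the
    # remaining options to the peer's debugwireargs() and printing whatever
    # the peer sends back.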
    repo = hg.peer(ui, opts, repopath)
    for opt in commands.remoteopts:
        del opts[opt[1]]
    args = {}
    for k, v in opts.iteritems():
        if v:
            args[k] = v
    # run twice to check that we don't mess up the stream for the next command
    res1 = repo.debugwireargs(*vals, **args)
    res2 = repo.debugwireargs(*vals, **args)
    ui.write("%s\n" % res1)
    if res1 != res2:
        ui.warn("%s\n" % res2)