debugfsinfo: improve case-sensitive testing...
Jun Wu
r31634:35738db2 default
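The patch below replaces debugfsinfo's fixed '.debugfsinfo' probe file (written with util.writefile and removed with util.tryunlink) with a tempfile.NamedTemporaryFile created in the target path, reporting '(unknown)' when the probe file cannot be created. A minimal standalone sketch of the same case-sensitivity probe, assuming only the Python standard library (the helper name is_case_sensitive is illustrative, not Mercurial's API; Mercurial's own check is util.fscasesensitive):

import os
import tempfile

def is_case_sensitive(path="."):
    # Create a mixed-case temporary file in `path`; it is deleted again
    # when the `with` block exits, even if the check below raises.
    with tempfile.NamedTemporaryFile(prefix=".DebugFsInfo", dir=path) as f:
        base = os.path.basename(f.name)
        # If the swapped-case spelling still resolves to an existing file,
        # the filesystem treats both spellings as the same entry.
        swapped = os.path.join(path, base.swapcase())
        return not os.path.exists(swapped)

print(is_case_sensitive("."))  # typically False on default macOS/Windows volumes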
@@ -1,2112 +1,2115 @@
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import difflib
10 import difflib
11 import errno
11 import errno
12 import operator
12 import operator
13 import os
13 import os
14 import random
14 import random
15 import socket
15 import socket
16 import string
16 import string
17 import sys
17 import sys
18 import tempfile
18 import tempfile
19 import time
19 import time
20
20
21 from .i18n import _
21 from .i18n import _
22 from .node import (
22 from .node import (
23 bin,
23 bin,
24 hex,
24 hex,
25 nullhex,
25 nullhex,
26 nullid,
26 nullid,
27 nullrev,
27 nullrev,
28 short,
28 short,
29 )
29 )
30 from . import (
30 from . import (
31 bundle2,
31 bundle2,
32 changegroup,
32 changegroup,
33 cmdutil,
33 cmdutil,
34 color,
34 color,
35 commands,
35 commands,
36 context,
36 context,
37 dagparser,
37 dagparser,
38 dagutil,
38 dagutil,
39 encoding,
39 encoding,
40 error,
40 error,
41 exchange,
41 exchange,
42 extensions,
42 extensions,
43 fileset,
43 fileset,
44 formatter,
44 formatter,
45 hg,
45 hg,
46 localrepo,
46 localrepo,
47 lock as lockmod,
47 lock as lockmod,
48 merge as mergemod,
48 merge as mergemod,
49 obsolete,
49 obsolete,
50 policy,
50 policy,
51 pvec,
51 pvec,
52 pycompat,
52 pycompat,
53 repair,
53 repair,
54 revlog,
54 revlog,
55 revset,
55 revset,
56 revsetlang,
56 revsetlang,
57 scmutil,
57 scmutil,
58 setdiscovery,
58 setdiscovery,
59 simplemerge,
59 simplemerge,
60 smartset,
60 smartset,
61 sslutil,
61 sslutil,
62 streamclone,
62 streamclone,
63 templater,
63 templater,
64 treediscovery,
64 treediscovery,
65 util,
65 util,
66 vfs as vfsmod,
66 vfs as vfsmod,
67 )
67 )
68
68
69 release = lockmod.release
69 release = lockmod.release
70
70
71 # We reuse the command table from commands because it is easier than
71 # We reuse the command table from commands because it is easier than
72 # teaching dispatch about multiple tables.
72 # teaching dispatch about multiple tables.
73 command = cmdutil.command(commands.table)
73 command = cmdutil.command(commands.table)
74
74
75 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
75 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
76 def debugancestor(ui, repo, *args):
76 def debugancestor(ui, repo, *args):
77 """find the ancestor revision of two revisions in a given index"""
77 """find the ancestor revision of two revisions in a given index"""
78 if len(args) == 3:
78 if len(args) == 3:
79 index, rev1, rev2 = args
79 index, rev1, rev2 = args
80 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
80 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
81 lookup = r.lookup
81 lookup = r.lookup
82 elif len(args) == 2:
82 elif len(args) == 2:
83 if not repo:
83 if not repo:
84 raise error.Abort(_('there is no Mercurial repository here '
84 raise error.Abort(_('there is no Mercurial repository here '
85 '(.hg not found)'))
85 '(.hg not found)'))
86 rev1, rev2 = args
86 rev1, rev2 = args
87 r = repo.changelog
87 r = repo.changelog
88 lookup = repo.lookup
88 lookup = repo.lookup
89 else:
89 else:
90 raise error.Abort(_('either two or three arguments required'))
90 raise error.Abort(_('either two or three arguments required'))
91 a = r.ancestor(lookup(rev1), lookup(rev2))
91 a = r.ancestor(lookup(rev1), lookup(rev2))
92 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
92 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
93
93
94 @command('debugapplystreamclonebundle', [], 'FILE')
94 @command('debugapplystreamclonebundle', [], 'FILE')
95 def debugapplystreamclonebundle(ui, repo, fname):
95 def debugapplystreamclonebundle(ui, repo, fname):
96 """apply a stream clone bundle file"""
96 """apply a stream clone bundle file"""
97 f = hg.openpath(ui, fname)
97 f = hg.openpath(ui, fname)
98 gen = exchange.readbundle(ui, f, fname)
98 gen = exchange.readbundle(ui, f, fname)
99 gen.apply(repo)
99 gen.apply(repo)
100
100
101 @command('debugbuilddag',
101 @command('debugbuilddag',
102 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
102 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
103 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
103 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
104 ('n', 'new-file', None, _('add new file at each rev'))],
104 ('n', 'new-file', None, _('add new file at each rev'))],
105 _('[OPTION]... [TEXT]'))
105 _('[OPTION]... [TEXT]'))
106 def debugbuilddag(ui, repo, text=None,
106 def debugbuilddag(ui, repo, text=None,
107 mergeable_file=False,
107 mergeable_file=False,
108 overwritten_file=False,
108 overwritten_file=False,
109 new_file=False):
109 new_file=False):
110 """builds a repo with a given DAG from scratch in the current empty repo
110 """builds a repo with a given DAG from scratch in the current empty repo
111
111
112 The description of the DAG is read from stdin if not given on the
112 The description of the DAG is read from stdin if not given on the
113 command line.
113 command line.
114
114
115 Elements:
115 Elements:
116
116
117 - "+n" is a linear run of n nodes based on the current default parent
117 - "+n" is a linear run of n nodes based on the current default parent
118 - "." is a single node based on the current default parent
118 - "." is a single node based on the current default parent
119 - "$" resets the default parent to null (implied at the start);
119 - "$" resets the default parent to null (implied at the start);
120 otherwise the default parent is always the last node created
120 otherwise the default parent is always the last node created
121 - "<p" sets the default parent to the backref p
121 - "<p" sets the default parent to the backref p
122 - "*p" is a fork at parent p, which is a backref
122 - "*p" is a fork at parent p, which is a backref
123 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
123 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
124 - "/p2" is a merge of the preceding node and p2
124 - "/p2" is a merge of the preceding node and p2
125 - ":tag" defines a local tag for the preceding node
125 - ":tag" defines a local tag for the preceding node
126 - "@branch" sets the named branch for subsequent nodes
126 - "@branch" sets the named branch for subsequent nodes
127 - "#...\\n" is a comment up to the end of the line
127 - "#...\\n" is a comment up to the end of the line
128
128
129 Whitespace between the above elements is ignored.
129 Whitespace between the above elements is ignored.
130
130
131 A backref is either
131 A backref is either
132
132
133 - a number n, which references the node curr-n, where curr is the current
133 - a number n, which references the node curr-n, where curr is the current
134 node, or
134 node, or
135 - the name of a local tag you placed earlier using ":tag", or
135 - the name of a local tag you placed earlier using ":tag", or
136 - empty to denote the default parent.
136 - empty to denote the default parent.
137
137
138 All string valued-elements are either strictly alphanumeric, or must
138 All string valued-elements are either strictly alphanumeric, or must
139 be enclosed in double quotes ("..."), with "\\" as escape character.
139 be enclosed in double quotes ("..."), with "\\" as escape character.
140 """
140 """
141
141
142 if text is None:
142 if text is None:
143 ui.status(_("reading DAG from stdin\n"))
143 ui.status(_("reading DAG from stdin\n"))
144 text = ui.fin.read()
144 text = ui.fin.read()
145
145
146 cl = repo.changelog
146 cl = repo.changelog
147 if len(cl) > 0:
147 if len(cl) > 0:
148 raise error.Abort(_('repository is not empty'))
148 raise error.Abort(_('repository is not empty'))
149
149
150 # determine number of revs in DAG
150 # determine number of revs in DAG
151 total = 0
151 total = 0
152 for type, data in dagparser.parsedag(text):
152 for type, data in dagparser.parsedag(text):
153 if type == 'n':
153 if type == 'n':
154 total += 1
154 total += 1
155
155
156 if mergeable_file:
156 if mergeable_file:
157 linesperrev = 2
157 linesperrev = 2
158 # make a file with k lines per rev
158 # make a file with k lines per rev
159 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
159 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
160 initialmergedlines.append("")
160 initialmergedlines.append("")
161
161
162 tags = []
162 tags = []
163
163
164 wlock = lock = tr = None
164 wlock = lock = tr = None
165 try:
165 try:
166 wlock = repo.wlock()
166 wlock = repo.wlock()
167 lock = repo.lock()
167 lock = repo.lock()
168 tr = repo.transaction("builddag")
168 tr = repo.transaction("builddag")
169
169
170 at = -1
170 at = -1
171 atbranch = 'default'
171 atbranch = 'default'
172 nodeids = []
172 nodeids = []
173 id = 0
173 id = 0
174 ui.progress(_('building'), id, unit=_('revisions'), total=total)
174 ui.progress(_('building'), id, unit=_('revisions'), total=total)
175 for type, data in dagparser.parsedag(text):
175 for type, data in dagparser.parsedag(text):
176 if type == 'n':
176 if type == 'n':
177 ui.note(('node %s\n' % str(data)))
177 ui.note(('node %s\n' % str(data)))
178 id, ps = data
178 id, ps = data
179
179
180 files = []
180 files = []
181 fctxs = {}
181 fctxs = {}
182
182
183 p2 = None
183 p2 = None
184 if mergeable_file:
184 if mergeable_file:
185 fn = "mf"
185 fn = "mf"
186 p1 = repo[ps[0]]
186 p1 = repo[ps[0]]
187 if len(ps) > 1:
187 if len(ps) > 1:
188 p2 = repo[ps[1]]
188 p2 = repo[ps[1]]
189 pa = p1.ancestor(p2)
189 pa = p1.ancestor(p2)
190 base, local, other = [x[fn].data() for x in (pa, p1,
190 base, local, other = [x[fn].data() for x in (pa, p1,
191 p2)]
191 p2)]
192 m3 = simplemerge.Merge3Text(base, local, other)
192 m3 = simplemerge.Merge3Text(base, local, other)
193 ml = [l.strip() for l in m3.merge_lines()]
193 ml = [l.strip() for l in m3.merge_lines()]
194 ml.append("")
194 ml.append("")
195 elif at > 0:
195 elif at > 0:
196 ml = p1[fn].data().split("\n")
196 ml = p1[fn].data().split("\n")
197 else:
197 else:
198 ml = initialmergedlines
198 ml = initialmergedlines
199 ml[id * linesperrev] += " r%i" % id
199 ml[id * linesperrev] += " r%i" % id
200 mergedtext = "\n".join(ml)
200 mergedtext = "\n".join(ml)
201 files.append(fn)
201 files.append(fn)
202 fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
202 fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
203
203
204 if overwritten_file:
204 if overwritten_file:
205 fn = "of"
205 fn = "of"
206 files.append(fn)
206 files.append(fn)
207 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
207 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
208
208
209 if new_file:
209 if new_file:
210 fn = "nf%i" % id
210 fn = "nf%i" % id
211 files.append(fn)
211 files.append(fn)
212 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
212 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
213 if len(ps) > 1:
213 if len(ps) > 1:
214 if not p2:
214 if not p2:
215 p2 = repo[ps[1]]
215 p2 = repo[ps[1]]
216 for fn in p2:
216 for fn in p2:
217 if fn.startswith("nf"):
217 if fn.startswith("nf"):
218 files.append(fn)
218 files.append(fn)
219 fctxs[fn] = p2[fn]
219 fctxs[fn] = p2[fn]
220
220
221 def fctxfn(repo, cx, path):
221 def fctxfn(repo, cx, path):
222 return fctxs.get(path)
222 return fctxs.get(path)
223
223
224 if len(ps) == 0 or ps[0] < 0:
224 if len(ps) == 0 or ps[0] < 0:
225 pars = [None, None]
225 pars = [None, None]
226 elif len(ps) == 1:
226 elif len(ps) == 1:
227 pars = [nodeids[ps[0]], None]
227 pars = [nodeids[ps[0]], None]
228 else:
228 else:
229 pars = [nodeids[p] for p in ps]
229 pars = [nodeids[p] for p in ps]
230 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
230 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
231 date=(id, 0),
231 date=(id, 0),
232 user="debugbuilddag",
232 user="debugbuilddag",
233 extra={'branch': atbranch})
233 extra={'branch': atbranch})
234 nodeid = repo.commitctx(cx)
234 nodeid = repo.commitctx(cx)
235 nodeids.append(nodeid)
235 nodeids.append(nodeid)
236 at = id
236 at = id
237 elif type == 'l':
237 elif type == 'l':
238 id, name = data
238 id, name = data
239 ui.note(('tag %s\n' % name))
239 ui.note(('tag %s\n' % name))
240 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
240 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
241 elif type == 'a':
241 elif type == 'a':
242 ui.note(('branch %s\n' % data))
242 ui.note(('branch %s\n' % data))
243 atbranch = data
243 atbranch = data
244 ui.progress(_('building'), id, unit=_('revisions'), total=total)
244 ui.progress(_('building'), id, unit=_('revisions'), total=total)
245 tr.close()
245 tr.close()
246
246
247 if tags:
247 if tags:
248 repo.vfs.write("localtags", "".join(tags))
248 repo.vfs.write("localtags", "".join(tags))
249 finally:
249 finally:
250 ui.progress(_('building'), None)
250 ui.progress(_('building'), None)
251 release(tr, lock, wlock)
251 release(tr, lock, wlock)
252
252
253 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
253 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
254 indent_string = ' ' * indent
254 indent_string = ' ' * indent
255 if all:
255 if all:
256 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
256 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
257 % indent_string)
257 % indent_string)
258
258
259 def showchunks(named):
259 def showchunks(named):
260 ui.write("\n%s%s\n" % (indent_string, named))
260 ui.write("\n%s%s\n" % (indent_string, named))
261 chain = None
261 chain = None
262 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
262 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
263 node = chunkdata['node']
263 node = chunkdata['node']
264 p1 = chunkdata['p1']
264 p1 = chunkdata['p1']
265 p2 = chunkdata['p2']
265 p2 = chunkdata['p2']
266 cs = chunkdata['cs']
266 cs = chunkdata['cs']
267 deltabase = chunkdata['deltabase']
267 deltabase = chunkdata['deltabase']
268 delta = chunkdata['delta']
268 delta = chunkdata['delta']
269 ui.write("%s%s %s %s %s %s %s\n" %
269 ui.write("%s%s %s %s %s %s %s\n" %
270 (indent_string, hex(node), hex(p1), hex(p2),
270 (indent_string, hex(node), hex(p1), hex(p2),
271 hex(cs), hex(deltabase), len(delta)))
271 hex(cs), hex(deltabase), len(delta)))
272 chain = node
272 chain = node
273
273
274 chunkdata = gen.changelogheader()
274 chunkdata = gen.changelogheader()
275 showchunks("changelog")
275 showchunks("changelog")
276 chunkdata = gen.manifestheader()
276 chunkdata = gen.manifestheader()
277 showchunks("manifest")
277 showchunks("manifest")
278 for chunkdata in iter(gen.filelogheader, {}):
278 for chunkdata in iter(gen.filelogheader, {}):
279 fname = chunkdata['filename']
279 fname = chunkdata['filename']
280 showchunks(fname)
280 showchunks(fname)
281 else:
281 else:
282 if isinstance(gen, bundle2.unbundle20):
282 if isinstance(gen, bundle2.unbundle20):
283 raise error.Abort(_('use debugbundle2 for this file'))
283 raise error.Abort(_('use debugbundle2 for this file'))
284 chunkdata = gen.changelogheader()
284 chunkdata = gen.changelogheader()
285 chain = None
285 chain = None
286 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
286 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
287 node = chunkdata['node']
287 node = chunkdata['node']
288 ui.write("%s%s\n" % (indent_string, hex(node)))
288 ui.write("%s%s\n" % (indent_string, hex(node)))
289 chain = node
289 chain = node
290
290
291 def _debugbundle2(ui, gen, all=None, **opts):
291 def _debugbundle2(ui, gen, all=None, **opts):
292 """lists the contents of a bundle2"""
292 """lists the contents of a bundle2"""
293 if not isinstance(gen, bundle2.unbundle20):
293 if not isinstance(gen, bundle2.unbundle20):
294 raise error.Abort(_('not a bundle2 file'))
294 raise error.Abort(_('not a bundle2 file'))
295 ui.write(('Stream params: %s\n' % repr(gen.params)))
295 ui.write(('Stream params: %s\n' % repr(gen.params)))
296 for part in gen.iterparts():
296 for part in gen.iterparts():
297 ui.write('%s -- %r\n' % (part.type, repr(part.params)))
297 ui.write('%s -- %r\n' % (part.type, repr(part.params)))
298 if part.type == 'changegroup':
298 if part.type == 'changegroup':
299 version = part.params.get('version', '01')
299 version = part.params.get('version', '01')
300 cg = changegroup.getunbundler(version, part, 'UN')
300 cg = changegroup.getunbundler(version, part, 'UN')
301 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
301 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
302
302
303 @command('debugbundle',
303 @command('debugbundle',
304 [('a', 'all', None, _('show all details')),
304 [('a', 'all', None, _('show all details')),
305 ('', 'spec', None, _('print the bundlespec of the bundle'))],
305 ('', 'spec', None, _('print the bundlespec of the bundle'))],
306 _('FILE'),
306 _('FILE'),
307 norepo=True)
307 norepo=True)
308 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
308 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
309 """lists the contents of a bundle"""
309 """lists the contents of a bundle"""
310 with hg.openpath(ui, bundlepath) as f:
310 with hg.openpath(ui, bundlepath) as f:
311 if spec:
311 if spec:
312 spec = exchange.getbundlespec(ui, f)
312 spec = exchange.getbundlespec(ui, f)
313 ui.write('%s\n' % spec)
313 ui.write('%s\n' % spec)
314 return
314 return
315
315
316 gen = exchange.readbundle(ui, f, bundlepath)
316 gen = exchange.readbundle(ui, f, bundlepath)
317 if isinstance(gen, bundle2.unbundle20):
317 if isinstance(gen, bundle2.unbundle20):
318 return _debugbundle2(ui, gen, all=all, **opts)
318 return _debugbundle2(ui, gen, all=all, **opts)
319 _debugchangegroup(ui, gen, all=all, **opts)
319 _debugchangegroup(ui, gen, all=all, **opts)
320
320
321 @command('debugcheckstate', [], '')
321 @command('debugcheckstate', [], '')
322 def debugcheckstate(ui, repo):
322 def debugcheckstate(ui, repo):
323 """validate the correctness of the current dirstate"""
323 """validate the correctness of the current dirstate"""
324 parent1, parent2 = repo.dirstate.parents()
324 parent1, parent2 = repo.dirstate.parents()
325 m1 = repo[parent1].manifest()
325 m1 = repo[parent1].manifest()
326 m2 = repo[parent2].manifest()
326 m2 = repo[parent2].manifest()
327 errors = 0
327 errors = 0
328 for f in repo.dirstate:
328 for f in repo.dirstate:
329 state = repo.dirstate[f]
329 state = repo.dirstate[f]
330 if state in "nr" and f not in m1:
330 if state in "nr" and f not in m1:
331 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
331 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
332 errors += 1
332 errors += 1
333 if state in "a" and f in m1:
333 if state in "a" and f in m1:
334 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
334 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
335 errors += 1
335 errors += 1
336 if state in "m" and f not in m1 and f not in m2:
336 if state in "m" and f not in m1 and f not in m2:
337 ui.warn(_("%s in state %s, but not in either manifest\n") %
337 ui.warn(_("%s in state %s, but not in either manifest\n") %
338 (f, state))
338 (f, state))
339 errors += 1
339 errors += 1
340 for f in m1:
340 for f in m1:
341 state = repo.dirstate[f]
341 state = repo.dirstate[f]
342 if state not in "nrm":
342 if state not in "nrm":
343 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
343 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
344 errors += 1
344 errors += 1
345 if errors:
345 if errors:
346 error = _(".hg/dirstate inconsistent with current parent's manifest")
346 error = _(".hg/dirstate inconsistent with current parent's manifest")
347 raise error.Abort(error)
347 raise error.Abort(error)
348
348
349 @command('debugcolor',
349 @command('debugcolor',
350 [('', 'style', None, _('show all configured styles'))],
350 [('', 'style', None, _('show all configured styles'))],
351 'hg debugcolor')
351 'hg debugcolor')
352 def debugcolor(ui, repo, **opts):
352 def debugcolor(ui, repo, **opts):
353 """show available color, effects or style"""
353 """show available color, effects or style"""
354 ui.write(('color mode: %s\n') % ui._colormode)
354 ui.write(('color mode: %s\n') % ui._colormode)
355 if opts.get('style'):
355 if opts.get('style'):
356 return _debugdisplaystyle(ui)
356 return _debugdisplaystyle(ui)
357 else:
357 else:
358 return _debugdisplaycolor(ui)
358 return _debugdisplaycolor(ui)
359
359
360 def _debugdisplaycolor(ui):
360 def _debugdisplaycolor(ui):
361 ui = ui.copy()
361 ui = ui.copy()
362 ui._styles.clear()
362 ui._styles.clear()
363 for effect in color._effects.keys():
363 for effect in color._effects.keys():
364 ui._styles[effect] = effect
364 ui._styles[effect] = effect
365 if ui._terminfoparams:
365 if ui._terminfoparams:
366 for k, v in ui.configitems('color'):
366 for k, v in ui.configitems('color'):
367 if k.startswith('color.'):
367 if k.startswith('color.'):
368 ui._styles[k] = k[6:]
368 ui._styles[k] = k[6:]
369 elif k.startswith('terminfo.'):
369 elif k.startswith('terminfo.'):
370 ui._styles[k] = k[9:]
370 ui._styles[k] = k[9:]
371 ui.write(_('available colors:\n'))
371 ui.write(_('available colors:\n'))
372 # sort label with a '_' after the other to group '_background' entry.
372 # sort label with a '_' after the other to group '_background' entry.
373 items = sorted(ui._styles.items(),
373 items = sorted(ui._styles.items(),
374 key=lambda i: ('_' in i[0], i[0], i[1]))
374 key=lambda i: ('_' in i[0], i[0], i[1]))
375 for colorname, label in items:
375 for colorname, label in items:
376 ui.write(('%s\n') % colorname, label=label)
376 ui.write(('%s\n') % colorname, label=label)
377
377
378 def _debugdisplaystyle(ui):
378 def _debugdisplaystyle(ui):
379 ui.write(_('available style:\n'))
379 ui.write(_('available style:\n'))
380 width = max(len(s) for s in ui._styles)
380 width = max(len(s) for s in ui._styles)
381 for label, effects in sorted(ui._styles.items()):
381 for label, effects in sorted(ui._styles.items()):
382 ui.write('%s' % label, label=label)
382 ui.write('%s' % label, label=label)
383 if effects:
383 if effects:
384 # 50
384 # 50
385 ui.write(': ')
385 ui.write(': ')
386 ui.write(' ' * (max(0, width - len(label))))
386 ui.write(' ' * (max(0, width - len(label))))
387 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
387 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
388 ui.write('\n')
388 ui.write('\n')
389
389
390 @command('debugcommands', [], _('[COMMAND]'), norepo=True)
390 @command('debugcommands', [], _('[COMMAND]'), norepo=True)
391 def debugcommands(ui, cmd='', *args):
391 def debugcommands(ui, cmd='', *args):
392 """list all available commands and options"""
392 """list all available commands and options"""
393 for cmd, vals in sorted(commands.table.iteritems()):
393 for cmd, vals in sorted(commands.table.iteritems()):
394 cmd = cmd.split('|')[0].strip('^')
394 cmd = cmd.split('|')[0].strip('^')
395 opts = ', '.join([i[1] for i in vals[1]])
395 opts = ', '.join([i[1] for i in vals[1]])
396 ui.write('%s: %s\n' % (cmd, opts))
396 ui.write('%s: %s\n' % (cmd, opts))
397
397
398 @command('debugcomplete',
398 @command('debugcomplete',
399 [('o', 'options', None, _('show the command options'))],
399 [('o', 'options', None, _('show the command options'))],
400 _('[-o] CMD'),
400 _('[-o] CMD'),
401 norepo=True)
401 norepo=True)
402 def debugcomplete(ui, cmd='', **opts):
402 def debugcomplete(ui, cmd='', **opts):
403 """returns the completion list associated with the given command"""
403 """returns the completion list associated with the given command"""
404
404
405 if opts.get('options'):
405 if opts.get('options'):
406 options = []
406 options = []
407 otables = [commands.globalopts]
407 otables = [commands.globalopts]
408 if cmd:
408 if cmd:
409 aliases, entry = cmdutil.findcmd(cmd, commands.table, False)
409 aliases, entry = cmdutil.findcmd(cmd, commands.table, False)
410 otables.append(entry[1])
410 otables.append(entry[1])
411 for t in otables:
411 for t in otables:
412 for o in t:
412 for o in t:
413 if "(DEPRECATED)" in o[3]:
413 if "(DEPRECATED)" in o[3]:
414 continue
414 continue
415 if o[0]:
415 if o[0]:
416 options.append('-%s' % o[0])
416 options.append('-%s' % o[0])
417 options.append('--%s' % o[1])
417 options.append('--%s' % o[1])
418 ui.write("%s\n" % "\n".join(options))
418 ui.write("%s\n" % "\n".join(options))
419 return
419 return
420
420
421 cmdlist, unused_allcmds = cmdutil.findpossible(cmd, commands.table)
421 cmdlist, unused_allcmds = cmdutil.findpossible(cmd, commands.table)
422 if ui.verbose:
422 if ui.verbose:
423 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
423 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
424 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
424 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
425
425
426 @command('debugcreatestreamclonebundle', [], 'FILE')
426 @command('debugcreatestreamclonebundle', [], 'FILE')
427 def debugcreatestreamclonebundle(ui, repo, fname):
427 def debugcreatestreamclonebundle(ui, repo, fname):
428 """create a stream clone bundle file
428 """create a stream clone bundle file
429
429
430 Stream bundles are special bundles that are essentially archives of
430 Stream bundles are special bundles that are essentially archives of
431 revlog files. They are commonly used for cloning very quickly.
431 revlog files. They are commonly used for cloning very quickly.
432 """
432 """
433 requirements, gen = streamclone.generatebundlev1(repo)
433 requirements, gen = streamclone.generatebundlev1(repo)
434 changegroup.writechunks(ui, gen, fname)
434 changegroup.writechunks(ui, gen, fname)
435
435
436 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
436 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
437
437
438 @command('debugdag',
438 @command('debugdag',
439 [('t', 'tags', None, _('use tags as labels')),
439 [('t', 'tags', None, _('use tags as labels')),
440 ('b', 'branches', None, _('annotate with branch names')),
440 ('b', 'branches', None, _('annotate with branch names')),
441 ('', 'dots', None, _('use dots for runs')),
441 ('', 'dots', None, _('use dots for runs')),
442 ('s', 'spaces', None, _('separate elements by spaces'))],
442 ('s', 'spaces', None, _('separate elements by spaces'))],
443 _('[OPTION]... [FILE [REV]...]'),
443 _('[OPTION]... [FILE [REV]...]'),
444 optionalrepo=True)
444 optionalrepo=True)
445 def debugdag(ui, repo, file_=None, *revs, **opts):
445 def debugdag(ui, repo, file_=None, *revs, **opts):
446 """format the changelog or an index DAG as a concise textual description
446 """format the changelog or an index DAG as a concise textual description
447
447
448 If you pass a revlog index, the revlog's DAG is emitted. If you list
448 If you pass a revlog index, the revlog's DAG is emitted. If you list
449 revision numbers, they get labeled in the output as rN.
449 revision numbers, they get labeled in the output as rN.
450
450
451 Otherwise, the changelog DAG of the current repo is emitted.
451 Otherwise, the changelog DAG of the current repo is emitted.
452 """
452 """
453 spaces = opts.get('spaces')
453 spaces = opts.get('spaces')
454 dots = opts.get('dots')
454 dots = opts.get('dots')
455 if file_:
455 if file_:
456 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
456 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
457 file_)
457 file_)
458 revs = set((int(r) for r in revs))
458 revs = set((int(r) for r in revs))
459 def events():
459 def events():
460 for r in rlog:
460 for r in rlog:
461 yield 'n', (r, list(p for p in rlog.parentrevs(r)
461 yield 'n', (r, list(p for p in rlog.parentrevs(r)
462 if p != -1))
462 if p != -1))
463 if r in revs:
463 if r in revs:
464 yield 'l', (r, "r%i" % r)
464 yield 'l', (r, "r%i" % r)
465 elif repo:
465 elif repo:
466 cl = repo.changelog
466 cl = repo.changelog
467 tags = opts.get('tags')
467 tags = opts.get('tags')
468 branches = opts.get('branches')
468 branches = opts.get('branches')
469 if tags:
469 if tags:
470 labels = {}
470 labels = {}
471 for l, n in repo.tags().items():
471 for l, n in repo.tags().items():
472 labels.setdefault(cl.rev(n), []).append(l)
472 labels.setdefault(cl.rev(n), []).append(l)
473 def events():
473 def events():
474 b = "default"
474 b = "default"
475 for r in cl:
475 for r in cl:
476 if branches:
476 if branches:
477 newb = cl.read(cl.node(r))[5]['branch']
477 newb = cl.read(cl.node(r))[5]['branch']
478 if newb != b:
478 if newb != b:
479 yield 'a', newb
479 yield 'a', newb
480 b = newb
480 b = newb
481 yield 'n', (r, list(p for p in cl.parentrevs(r)
481 yield 'n', (r, list(p for p in cl.parentrevs(r)
482 if p != -1))
482 if p != -1))
483 if tags:
483 if tags:
484 ls = labels.get(r)
484 ls = labels.get(r)
485 if ls:
485 if ls:
486 for l in ls:
486 for l in ls:
487 yield 'l', (r, l)
487 yield 'l', (r, l)
488 else:
488 else:
489 raise error.Abort(_('need repo for changelog dag'))
489 raise error.Abort(_('need repo for changelog dag'))
490
490
491 for line in dagparser.dagtextlines(events(),
491 for line in dagparser.dagtextlines(events(),
492 addspaces=spaces,
492 addspaces=spaces,
493 wraplabels=True,
493 wraplabels=True,
494 wrapannotations=True,
494 wrapannotations=True,
495 wrapnonlinear=dots,
495 wrapnonlinear=dots,
496 usedots=dots,
496 usedots=dots,
497 maxlinewidth=70):
497 maxlinewidth=70):
498 ui.write(line)
498 ui.write(line)
499 ui.write("\n")
499 ui.write("\n")
500
500
501 @command('debugdata', commands.debugrevlogopts, _('-c|-m|FILE REV'))
501 @command('debugdata', commands.debugrevlogopts, _('-c|-m|FILE REV'))
502 def debugdata(ui, repo, file_, rev=None, **opts):
502 def debugdata(ui, repo, file_, rev=None, **opts):
503 """dump the contents of a data file revision"""
503 """dump the contents of a data file revision"""
504 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
504 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
505 if rev is not None:
505 if rev is not None:
506 raise error.CommandError('debugdata', _('invalid arguments'))
506 raise error.CommandError('debugdata', _('invalid arguments'))
507 file_, rev = None, file_
507 file_, rev = None, file_
508 elif rev is None:
508 elif rev is None:
509 raise error.CommandError('debugdata', _('invalid arguments'))
509 raise error.CommandError('debugdata', _('invalid arguments'))
510 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
510 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
511 try:
511 try:
512 ui.write(r.revision(r.lookup(rev), raw=True))
512 ui.write(r.revision(r.lookup(rev), raw=True))
513 except KeyError:
513 except KeyError:
514 raise error.Abort(_('invalid revision identifier %s') % rev)
514 raise error.Abort(_('invalid revision identifier %s') % rev)
515
515
516 @command('debugdate',
516 @command('debugdate',
517 [('e', 'extended', None, _('try extended date formats'))],
517 [('e', 'extended', None, _('try extended date formats'))],
518 _('[-e] DATE [RANGE]'),
518 _('[-e] DATE [RANGE]'),
519 norepo=True, optionalrepo=True)
519 norepo=True, optionalrepo=True)
520 def debugdate(ui, date, range=None, **opts):
520 def debugdate(ui, date, range=None, **opts):
521 """parse and display a date"""
521 """parse and display a date"""
522 if opts["extended"]:
522 if opts["extended"]:
523 d = util.parsedate(date, util.extendeddateformats)
523 d = util.parsedate(date, util.extendeddateformats)
524 else:
524 else:
525 d = util.parsedate(date)
525 d = util.parsedate(date)
526 ui.write(("internal: %s %s\n") % d)
526 ui.write(("internal: %s %s\n") % d)
527 ui.write(("standard: %s\n") % util.datestr(d))
527 ui.write(("standard: %s\n") % util.datestr(d))
528 if range:
528 if range:
529 m = util.matchdate(range)
529 m = util.matchdate(range)
530 ui.write(("match: %s\n") % m(d[0]))
530 ui.write(("match: %s\n") % m(d[0]))
531
531
532 @command('debugdeltachain',
532 @command('debugdeltachain',
533 commands.debugrevlogopts + commands.formatteropts,
533 commands.debugrevlogopts + commands.formatteropts,
534 _('-c|-m|FILE'),
534 _('-c|-m|FILE'),
535 optionalrepo=True)
535 optionalrepo=True)
536 def debugdeltachain(ui, repo, file_=None, **opts):
536 def debugdeltachain(ui, repo, file_=None, **opts):
537 """dump information about delta chains in a revlog
537 """dump information about delta chains in a revlog
538
538
539 Output can be templatized. Available template keywords are:
539 Output can be templatized. Available template keywords are:
540
540
541 :``rev``: revision number
541 :``rev``: revision number
542 :``chainid``: delta chain identifier (numbered by unique base)
542 :``chainid``: delta chain identifier (numbered by unique base)
543 :``chainlen``: delta chain length to this revision
543 :``chainlen``: delta chain length to this revision
544 :``prevrev``: previous revision in delta chain
544 :``prevrev``: previous revision in delta chain
545 :``deltatype``: role of delta / how it was computed
545 :``deltatype``: role of delta / how it was computed
546 :``compsize``: compressed size of revision
546 :``compsize``: compressed size of revision
547 :``uncompsize``: uncompressed size of revision
547 :``uncompsize``: uncompressed size of revision
548 :``chainsize``: total size of compressed revisions in chain
548 :``chainsize``: total size of compressed revisions in chain
549 :``chainratio``: total chain size divided by uncompressed revision size
549 :``chainratio``: total chain size divided by uncompressed revision size
550 (new delta chains typically start at ratio 2.00)
550 (new delta chains typically start at ratio 2.00)
551 :``lindist``: linear distance from base revision in delta chain to end
551 :``lindist``: linear distance from base revision in delta chain to end
552 of this revision
552 of this revision
553 :``extradist``: total size of revisions not part of this delta chain from
553 :``extradist``: total size of revisions not part of this delta chain from
554 base of delta chain to end of this revision; a measurement
554 base of delta chain to end of this revision; a measurement
555 of how much extra data we need to read/seek across to read
555 of how much extra data we need to read/seek across to read
556 the delta chain for this revision
556 the delta chain for this revision
557 :``extraratio``: extradist divided by chainsize; another representation of
557 :``extraratio``: extradist divided by chainsize; another representation of
558 how much unrelated data is needed to load this delta chain
558 how much unrelated data is needed to load this delta chain
559 """
559 """
560 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
560 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
561 index = r.index
561 index = r.index
562 generaldelta = r.version & revlog.REVLOGGENERALDELTA
562 generaldelta = r.version & revlog.REVLOGGENERALDELTA
563
563
564 def revinfo(rev):
564 def revinfo(rev):
565 e = index[rev]
565 e = index[rev]
566 compsize = e[1]
566 compsize = e[1]
567 uncompsize = e[2]
567 uncompsize = e[2]
568 chainsize = 0
568 chainsize = 0
569
569
570 if generaldelta:
570 if generaldelta:
571 if e[3] == e[5]:
571 if e[3] == e[5]:
572 deltatype = 'p1'
572 deltatype = 'p1'
573 elif e[3] == e[6]:
573 elif e[3] == e[6]:
574 deltatype = 'p2'
574 deltatype = 'p2'
575 elif e[3] == rev - 1:
575 elif e[3] == rev - 1:
576 deltatype = 'prev'
576 deltatype = 'prev'
577 elif e[3] == rev:
577 elif e[3] == rev:
578 deltatype = 'base'
578 deltatype = 'base'
579 else:
579 else:
580 deltatype = 'other'
580 deltatype = 'other'
581 else:
581 else:
582 if e[3] == rev:
582 if e[3] == rev:
583 deltatype = 'base'
583 deltatype = 'base'
584 else:
584 else:
585 deltatype = 'prev'
585 deltatype = 'prev'
586
586
587 chain = r._deltachain(rev)[0]
587 chain = r._deltachain(rev)[0]
588 for iterrev in chain:
588 for iterrev in chain:
589 e = index[iterrev]
589 e = index[iterrev]
590 chainsize += e[1]
590 chainsize += e[1]
591
591
592 return compsize, uncompsize, deltatype, chain, chainsize
592 return compsize, uncompsize, deltatype, chain, chainsize
593
593
594 fm = ui.formatter('debugdeltachain', opts)
594 fm = ui.formatter('debugdeltachain', opts)
595
595
596 fm.plain(' rev chain# chainlen prev delta '
596 fm.plain(' rev chain# chainlen prev delta '
597 'size rawsize chainsize ratio lindist extradist '
597 'size rawsize chainsize ratio lindist extradist '
598 'extraratio\n')
598 'extraratio\n')
599
599
600 chainbases = {}
600 chainbases = {}
601 for rev in r:
601 for rev in r:
602 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
602 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
603 chainbase = chain[0]
603 chainbase = chain[0]
604 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
604 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
605 basestart = r.start(chainbase)
605 basestart = r.start(chainbase)
606 revstart = r.start(rev)
606 revstart = r.start(rev)
607 lineardist = revstart + comp - basestart
607 lineardist = revstart + comp - basestart
608 extradist = lineardist - chainsize
608 extradist = lineardist - chainsize
609 try:
609 try:
610 prevrev = chain[-2]
610 prevrev = chain[-2]
611 except IndexError:
611 except IndexError:
612 prevrev = -1
612 prevrev = -1
613
613
614 chainratio = float(chainsize) / float(uncomp)
614 chainratio = float(chainsize) / float(uncomp)
615 extraratio = float(extradist) / float(chainsize)
615 extraratio = float(extradist) / float(chainsize)
616
616
617 fm.startitem()
617 fm.startitem()
618 fm.write('rev chainid chainlen prevrev deltatype compsize '
618 fm.write('rev chainid chainlen prevrev deltatype compsize '
619 'uncompsize chainsize chainratio lindist extradist '
619 'uncompsize chainsize chainratio lindist extradist '
620 'extraratio',
620 'extraratio',
621 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f\n',
621 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f\n',
622 rev, chainid, len(chain), prevrev, deltatype, comp,
622 rev, chainid, len(chain), prevrev, deltatype, comp,
623 uncomp, chainsize, chainratio, lineardist, extradist,
623 uncomp, chainsize, chainratio, lineardist, extradist,
624 extraratio,
624 extraratio,
625 rev=rev, chainid=chainid, chainlen=len(chain),
625 rev=rev, chainid=chainid, chainlen=len(chain),
626 prevrev=prevrev, deltatype=deltatype, compsize=comp,
626 prevrev=prevrev, deltatype=deltatype, compsize=comp,
627 uncompsize=uncomp, chainsize=chainsize,
627 uncompsize=uncomp, chainsize=chainsize,
628 chainratio=chainratio, lindist=lineardist,
628 chainratio=chainratio, lindist=lineardist,
629 extradist=extradist, extraratio=extraratio)
629 extradist=extradist, extraratio=extraratio)
630
630
631 fm.end()
631 fm.end()
632
632
633 @command('debugdirstate|debugstate',
633 @command('debugdirstate|debugstate',
634 [('', 'nodates', None, _('do not display the saved mtime')),
634 [('', 'nodates', None, _('do not display the saved mtime')),
635 ('', 'datesort', None, _('sort by saved mtime'))],
635 ('', 'datesort', None, _('sort by saved mtime'))],
636 _('[OPTION]...'))
636 _('[OPTION]...'))
637 def debugstate(ui, repo, **opts):
637 def debugstate(ui, repo, **opts):
638 """show the contents of the current dirstate"""
638 """show the contents of the current dirstate"""
639
639
640 nodates = opts.get('nodates')
640 nodates = opts.get('nodates')
641 datesort = opts.get('datesort')
641 datesort = opts.get('datesort')
642
642
643 timestr = ""
643 timestr = ""
644 if datesort:
644 if datesort:
645 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
645 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
646 else:
646 else:
647 keyfunc = None # sort by filename
647 keyfunc = None # sort by filename
648 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
648 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
649 if ent[3] == -1:
649 if ent[3] == -1:
650 timestr = 'unset '
650 timestr = 'unset '
651 elif nodates:
651 elif nodates:
652 timestr = 'set '
652 timestr = 'set '
653 else:
653 else:
654 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
654 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
655 time.localtime(ent[3]))
655 time.localtime(ent[3]))
656 if ent[1] & 0o20000:
656 if ent[1] & 0o20000:
657 mode = 'lnk'
657 mode = 'lnk'
658 else:
658 else:
659 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
659 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
660 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
660 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
661 for f in repo.dirstate.copies():
661 for f in repo.dirstate.copies():
662 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
662 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
663
663
664 @command('debugdiscovery',
664 @command('debugdiscovery',
665 [('', 'old', None, _('use old-style discovery')),
665 [('', 'old', None, _('use old-style discovery')),
666 ('', 'nonheads', None,
666 ('', 'nonheads', None,
667 _('use old-style discovery with non-heads included')),
667 _('use old-style discovery with non-heads included')),
668 ] + commands.remoteopts,
668 ] + commands.remoteopts,
669 _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
669 _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
670 def debugdiscovery(ui, repo, remoteurl="default", **opts):
670 def debugdiscovery(ui, repo, remoteurl="default", **opts):
671 """runs the changeset discovery protocol in isolation"""
671 """runs the changeset discovery protocol in isolation"""
672 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
672 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
673 opts.get('branch'))
673 opts.get('branch'))
674 remote = hg.peer(repo, opts, remoteurl)
674 remote = hg.peer(repo, opts, remoteurl)
675 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
675 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
676
676
677 # make sure tests are repeatable
677 # make sure tests are repeatable
678 random.seed(12323)
678 random.seed(12323)
679
679
680 def doit(localheads, remoteheads, remote=remote):
680 def doit(localheads, remoteheads, remote=remote):
681 if opts.get('old'):
681 if opts.get('old'):
682 if localheads:
682 if localheads:
683 raise error.Abort('cannot use localheads with old style '
683 raise error.Abort('cannot use localheads with old style '
684 'discovery')
684 'discovery')
685 if not util.safehasattr(remote, 'branches'):
685 if not util.safehasattr(remote, 'branches'):
686 # enable in-client legacy support
686 # enable in-client legacy support
687 remote = localrepo.locallegacypeer(remote.local())
687 remote = localrepo.locallegacypeer(remote.local())
688 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
688 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
689 force=True)
689 force=True)
690 common = set(common)
690 common = set(common)
691 if not opts.get('nonheads'):
691 if not opts.get('nonheads'):
692 ui.write(("unpruned common: %s\n") %
692 ui.write(("unpruned common: %s\n") %
693 " ".join(sorted(short(n) for n in common)))
693 " ".join(sorted(short(n) for n in common)))
694 dag = dagutil.revlogdag(repo.changelog)
694 dag = dagutil.revlogdag(repo.changelog)
695 all = dag.ancestorset(dag.internalizeall(common))
695 all = dag.ancestorset(dag.internalizeall(common))
696 common = dag.externalizeall(dag.headsetofconnecteds(all))
696 common = dag.externalizeall(dag.headsetofconnecteds(all))
697 else:
697 else:
698 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
698 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
699 common = set(common)
699 common = set(common)
700 rheads = set(hds)
700 rheads = set(hds)
701 lheads = set(repo.heads())
701 lheads = set(repo.heads())
702 ui.write(("common heads: %s\n") %
702 ui.write(("common heads: %s\n") %
703 " ".join(sorted(short(n) for n in common)))
703 " ".join(sorted(short(n) for n in common)))
704 if lheads <= common:
704 if lheads <= common:
705 ui.write(("local is subset\n"))
705 ui.write(("local is subset\n"))
706 elif rheads <= common:
706 elif rheads <= common:
707 ui.write(("remote is subset\n"))
707 ui.write(("remote is subset\n"))
708
708
709 serverlogs = opts.get('serverlog')
709 serverlogs = opts.get('serverlog')
710 if serverlogs:
710 if serverlogs:
711 for filename in serverlogs:
711 for filename in serverlogs:
712 with open(filename, 'r') as logfile:
712 with open(filename, 'r') as logfile:
713 line = logfile.readline()
713 line = logfile.readline()
714 while line:
714 while line:
715 parts = line.strip().split(';')
715 parts = line.strip().split(';')
716 op = parts[1]
716 op = parts[1]
717 if op == 'cg':
717 if op == 'cg':
718 pass
718 pass
719 elif op == 'cgss':
719 elif op == 'cgss':
720 doit(parts[2].split(' '), parts[3].split(' '))
720 doit(parts[2].split(' '), parts[3].split(' '))
721 elif op == 'unb':
721 elif op == 'unb':
722 doit(parts[3].split(' '), parts[2].split(' '))
722 doit(parts[3].split(' '), parts[2].split(' '))
723 line = logfile.readline()
723 line = logfile.readline()
724 else:
724 else:
725 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
725 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
726 opts.get('remote_head'))
726 opts.get('remote_head'))
727 localrevs = opts.get('local_head')
727 localrevs = opts.get('local_head')
728 doit(localrevs, remoterevs)
728 doit(localrevs, remoterevs)
729
729
730 @command('debugextensions', commands.formatteropts, [], norepo=True)
730 @command('debugextensions', commands.formatteropts, [], norepo=True)
731 def debugextensions(ui, **opts):
731 def debugextensions(ui, **opts):
732 '''show information about active extensions'''
732 '''show information about active extensions'''
733 exts = extensions.extensions(ui)
733 exts = extensions.extensions(ui)
734 hgver = util.version()
734 hgver = util.version()
735 fm = ui.formatter('debugextensions', opts)
735 fm = ui.formatter('debugextensions', opts)
736 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
736 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
737 isinternal = extensions.ismoduleinternal(extmod)
737 isinternal = extensions.ismoduleinternal(extmod)
738 extsource = pycompat.fsencode(extmod.__file__)
738 extsource = pycompat.fsencode(extmod.__file__)
739 if isinternal:
739 if isinternal:
740 exttestedwith = [] # never expose magic string to users
740 exttestedwith = [] # never expose magic string to users
741 else:
741 else:
742 exttestedwith = getattr(extmod, 'testedwith', '').split()
742 exttestedwith = getattr(extmod, 'testedwith', '').split()
743 extbuglink = getattr(extmod, 'buglink', None)
743 extbuglink = getattr(extmod, 'buglink', None)
744
744
745 fm.startitem()
745 fm.startitem()
746
746
747 if ui.quiet or ui.verbose:
747 if ui.quiet or ui.verbose:
748 fm.write('name', '%s\n', extname)
748 fm.write('name', '%s\n', extname)
749 else:
749 else:
750 fm.write('name', '%s', extname)
750 fm.write('name', '%s', extname)
751 if isinternal or hgver in exttestedwith:
751 if isinternal or hgver in exttestedwith:
752 fm.plain('\n')
752 fm.plain('\n')
753 elif not exttestedwith:
753 elif not exttestedwith:
754 fm.plain(_(' (untested!)\n'))
754 fm.plain(_(' (untested!)\n'))
755 else:
755 else:
756 lasttestedversion = exttestedwith[-1]
756 lasttestedversion = exttestedwith[-1]
757 fm.plain(' (%s!)\n' % lasttestedversion)
757 fm.plain(' (%s!)\n' % lasttestedversion)
758
758
759 fm.condwrite(ui.verbose and extsource, 'source',
759 fm.condwrite(ui.verbose and extsource, 'source',
760 _(' location: %s\n'), extsource or "")
760 _(' location: %s\n'), extsource or "")
761
761
762 if ui.verbose:
762 if ui.verbose:
763 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
763 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
764 fm.data(bundled=isinternal)
764 fm.data(bundled=isinternal)
765
765
766 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
766 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
767 _(' tested with: %s\n'),
767 _(' tested with: %s\n'),
768 fm.formatlist(exttestedwith, name='ver'))
768 fm.formatlist(exttestedwith, name='ver'))
769
769
770 fm.condwrite(ui.verbose and extbuglink, 'buglink',
770 fm.condwrite(ui.verbose and extbuglink, 'buglink',
771 _(' bug reporting: %s\n'), extbuglink or "")
771 _(' bug reporting: %s\n'), extbuglink or "")
772
772
773 fm.end()
773 fm.end()
774
774
775 @command('debugfileset',
775 @command('debugfileset',
776 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
776 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
777 _('[-r REV] FILESPEC'))
777 _('[-r REV] FILESPEC'))
778 def debugfileset(ui, repo, expr, **opts):
778 def debugfileset(ui, repo, expr, **opts):
779 '''parse and apply a fileset specification'''
779 '''parse and apply a fileset specification'''
780 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
780 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
781 if ui.verbose:
781 if ui.verbose:
782 tree = fileset.parse(expr)
782 tree = fileset.parse(expr)
783 ui.note(fileset.prettyformat(tree), "\n")
783 ui.note(fileset.prettyformat(tree), "\n")
784
784
785 for f in ctx.getfileset(expr):
785 for f in ctx.getfileset(expr):
786 ui.write("%s\n" % f)
786 ui.write("%s\n" % f)
787
787
788 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
788 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
789 def debugfsinfo(ui, path="."):
789 def debugfsinfo(ui, path="."):
790 """show information detected about current filesystem"""
790 """show information detected about current filesystem"""
- 791   util.writefile('.debugfsinfo', '')
792 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
791 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
793 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
792 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
794 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
793 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
795 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
794 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
- 796   ui.write(('case-sensitive: %s\n') % (util.fscasesensitive('.debugfsinfo')
- 797                                        and 'yes' or 'no'))
- 798   util.tryunlink('.debugfsinfo')
+ 795   casesensitive = '(unknown)'
+ 796   try:
+ 797       with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
+ 798           casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
+ 799   except OSError:
+ 800       pass
+ 801   ui.write(('case-sensitive: %s\n') % casesensitive)
799
802
800 @command('debuggetbundle',
803 @command('debuggetbundle',
801 [('H', 'head', [], _('id of head node'), _('ID')),
804 [('H', 'head', [], _('id of head node'), _('ID')),
802 ('C', 'common', [], _('id of common node'), _('ID')),
805 ('C', 'common', [], _('id of common node'), _('ID')),
803 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
806 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
804 _('REPO FILE [-H|-C ID]...'),
807 _('REPO FILE [-H|-C ID]...'),
805 norepo=True)
808 norepo=True)
806 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
809 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
807 """retrieves a bundle from a repo
810 """retrieves a bundle from a repo
808
811
809 Every ID must be a full-length hex node id string. Saves the bundle to the
812 Every ID must be a full-length hex node id string. Saves the bundle to the
810 given file.
813 given file.
811 """
814 """
812 repo = hg.peer(ui, opts, repopath)
815 repo = hg.peer(ui, opts, repopath)
813 if not repo.capable('getbundle'):
816 if not repo.capable('getbundle'):
814 raise error.Abort("getbundle() not supported by target repository")
817 raise error.Abort("getbundle() not supported by target repository")
815 args = {}
818 args = {}
816 if common:
819 if common:
817 args['common'] = [bin(s) for s in common]
820 args['common'] = [bin(s) for s in common]
818 if head:
821 if head:
819 args['heads'] = [bin(s) for s in head]
822 args['heads'] = [bin(s) for s in head]
820 # TODO: get desired bundlecaps from command line.
823 # TODO: get desired bundlecaps from command line.
821 args['bundlecaps'] = None
824 args['bundlecaps'] = None
822 bundle = repo.getbundle('debug', **args)
825 bundle = repo.getbundle('debug', **args)
823
826
824 bundletype = opts.get('type', 'bzip2').lower()
827 bundletype = opts.get('type', 'bzip2').lower()
825 btypes = {'none': 'HG10UN',
828 btypes = {'none': 'HG10UN',
826 'bzip2': 'HG10BZ',
829 'bzip2': 'HG10BZ',
827 'gzip': 'HG10GZ',
830 'gzip': 'HG10GZ',
828 'bundle2': 'HG20'}
831 'bundle2': 'HG20'}
829 bundletype = btypes.get(bundletype)
832 bundletype = btypes.get(bundletype)
830 if bundletype not in bundle2.bundletypes:
833 if bundletype not in bundle2.bundletypes:
831 raise error.Abort(_('unknown bundle type specified with --type'))
834 raise error.Abort(_('unknown bundle type specified with --type'))
832 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
835 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
833
836
834 @command('debugignore', [], '[FILE]')
837 @command('debugignore', [], '[FILE]')
835 def debugignore(ui, repo, *files, **opts):
838 def debugignore(ui, repo, *files, **opts):
836 """display the combined ignore pattern and information about ignored files
839 """display the combined ignore pattern and information about ignored files
837
840
838 With no argument display the combined ignore pattern.
841 With no argument display the combined ignore pattern.
839
842
840 Given space separated file names, shows if the given file is ignored and
843 Given space separated file names, shows if the given file is ignored and
841 if so, show the ignore rule (file and line number) that matched it.
844 if so, show the ignore rule (file and line number) that matched it.
842 """
845 """
843 ignore = repo.dirstate._ignore
846 ignore = repo.dirstate._ignore
844 if not files:
847 if not files:
845 # Show all the patterns
848 # Show all the patterns
846 includepat = getattr(ignore, 'includepat', None)
849 includepat = getattr(ignore, 'includepat', None)
847 if includepat is not None:
850 if includepat is not None:
848 ui.write("%s\n" % includepat)
851 ui.write("%s\n" % includepat)
849 else:
852 else:
850 raise error.Abort(_("no ignore patterns found"))
853 raise error.Abort(_("no ignore patterns found"))
851 else:
854 else:
852 for f in files:
855 for f in files:
853 nf = util.normpath(f)
856 nf = util.normpath(f)
854 ignored = None
857 ignored = None
855 ignoredata = None
858 ignoredata = None
856 if nf != '.':
859 if nf != '.':
857 if ignore(nf):
860 if ignore(nf):
858 ignored = nf
861 ignored = nf
859 ignoredata = repo.dirstate._ignorefileandline(nf)
862 ignoredata = repo.dirstate._ignorefileandline(nf)
860 else:
863 else:
861 for p in util.finddirs(nf):
864 for p in util.finddirs(nf):
862 if ignore(p):
865 if ignore(p):
863 ignored = p
866 ignored = p
864 ignoredata = repo.dirstate._ignorefileandline(p)
867 ignoredata = repo.dirstate._ignorefileandline(p)
865 break
868 break
866 if ignored:
869 if ignored:
867 if ignored == nf:
870 if ignored == nf:
868 ui.write(_("%s is ignored\n") % f)
871 ui.write(_("%s is ignored\n") % f)
869 else:
872 else:
870 ui.write(_("%s is ignored because of "
873 ui.write(_("%s is ignored because of "
871 "containing folder %s\n")
874 "containing folder %s\n")
872 % (f, ignored))
875 % (f, ignored))
873 ignorefile, lineno, line = ignoredata
876 ignorefile, lineno, line = ignoredata
874 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
877 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
875 % (ignorefile, lineno, line))
878 % (ignorefile, lineno, line))
876 else:
879 else:
877 ui.write(_("%s is not ignored\n") % f)
880 ui.write(_("%s is not ignored\n") % f)
878
881
@command('debugindex', commands.debugrevlogopts +
    [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
    _('[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True)
def debugindex(ui, repo, file_=None, **opts):
    """dump the contents of an index file"""
    r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise error.Abort(_("unknown format %d") % format)

    generaldelta = r.version & revlog.REVLOGGENERALDELTA
    if generaldelta:
        basehdr = ' delta'
    else:
        basehdr = ' base'

    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

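    # format 0 prints the traditional columns; format 1 additionally reports
    # the per-revision flags and uncompressed size columns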
    if format == 0:
        ui.write((" rev offset length " + basehdr + " linkrev"
                  " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
    elif format == 1:
        ui.write((" rev flag offset length"
                  " size " + basehdr + " link p1 p2"
                  " %s\n") % "nodeid".rjust(idlen))

    for i in r:
        node = r.node(i)
        if generaldelta:
            base = r.deltaparent(i)
        else:
            base = r.chainbase(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                pp = [nullid, nullid]
            ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                i, r.start(i), r.length(i), base, r.linkrev(i),
                shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
                i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                base, r.linkrev(i), pr[0], pr[1], shortfn(node)))

@command('debugindexdot', commands.debugrevlogopts,
    _('-c|-m|FILE'), optionalrepo=True)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
    ui.write(("digraph G {\n"))
    for i in r:
        node = r.node(i)
        pp = r.parents(node)
        ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
        if pp[1] != nullid:
            ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
    ui.write("}\n")

@command('debuginstall', [] + commands.formatteropts, '', norepo=True)
def debuginstall(ui, **opts):
    '''test Mercurial installation

    Returns 0 on success.
    '''

    def writetemp(contents):
        (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, pycompat.sysstr("wb"))
        f.write(contents)
        f.close()
        return name

    problems = 0

    fm = ui.formatter('debuginstall', opts)
    fm.startitem()

    # encoding
    fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
    err = None
    try:
        encoding.fromlocal("test")
    except error.Abort as inst:
        err = inst
        problems += 1
    fm.condwrite(err, 'encodingerror', _(" %s\n"
                 " (check that your locale is properly set)\n"), err)

    # Python
    fm.write('pythonexe', _("checking Python executable (%s)\n"),
             pycompat.sysexecutable)
    fm.write('pythonver', _("checking Python version (%s)\n"),
             ("%d.%d.%d" % sys.version_info[:3]))
    fm.write('pythonlib', _("checking Python lib (%s)...\n"),
             os.path.dirname(pycompat.fsencode(os.__file__)))

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add('sni')

    fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
             fm.formatlist(sorted(security), name='protocol',
                           fmt='%s', sep=','))

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if 'tls1.2' not in security:
        fm.plain(_(' TLS 1.2 not supported by Python install; '
                   'network connections lack modern security\n'))
    if 'sni' not in security:
        fm.plain(_(' SNI not supported by Python install; may have '
                   'connectivity issues with some servers\n'))

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write('hgver', _("checking Mercurial version (%s)\n"),
             hgver.split('+')[0])
    fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
             '+'.join(hgver.split('+')[1:]))

    # compiled modules
    fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
             policy.policy)
    fm.write('hgmodules', _("checking installed modules (%s)...\n"),
             os.path.dirname(pycompat.fsencode(__file__)))

    err = None
    try:
        from . import (
            base85,
            bdiff,
            mpatch,
            osutil,
        )
        dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
    except Exception as inst:
        err = inst
        problems += 1
    fm.condwrite(err, 'extensionserror', " %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write('compengines', _('checking registered compression engines (%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines),
                           name='compengine', fmt='%s', sep=', '))
    fm.write('compenginesavail', _('checking available compression engines '
                                   '(%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines
                                  if e.available()),
                           name='compengine', fmt='%s', sep=', '))
    wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
    fm.write('compenginesserver', _('checking available compression engines '
                                    'for wire protocol (%s)\n'),
             fm.formatlist([e.name() for e in wirecompengines
                            if e.wireprotosupport()],
                           name='compengine', fmt='%s', sep=', '))

    # templates
    p = templater.templatepaths()
    fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
    fm.condwrite(not p, '', _(" no template directories found\n"))
    if p:
        m = templater.templatepath("map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = inst
                p = None
            fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
        else:
            p = None
        fm.condwrite(p, 'defaulttemplate',
                     _("checking default template (%s)\n"), m)
        fm.condwrite(not m, 'defaulttemplatenotfound',
                     _(" template '%s' not found\n"), "default")
    if not p:
        problems += 1
    fm.condwrite(not p, '',
                 _(" (templates seem to have been installed incorrectly)\n"))

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    fm.write('editor', _("checking commit editor... (%s)\n"), editor)
    cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
    fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
                 _(" No commit editor set and can't find %s in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor == 'vi' and editor)
    fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
                 _(" Can't find editor '%s' in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor)
    if not cmdpath and editor != 'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e
        problems += 1

    fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
    fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
                 " (specify a username in your configuration file)\n"), err)

    fm.condwrite(not problems, '',
                 _("no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(problems, 'problems',
                 _("%d problems detected,"
                   " please check your install!\n"), problems)
    fm.end()

    return problems

@command('debugknown', [], _('REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('known'):
        raise error.Abort("known() not supported by target repository")
    flags = repo.known([bin(s) for s in ids])
    ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))

@command('debuglabelcomplete', [], _('LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    debugnamecomplete(ui, repo, *args)

@command('debuglocks',
         [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
          ('W', 'force-wlock', None,
           _('free the working state lock (DANGEROUS)'))],
         _('[OPTION]...'))
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Returns 0 if no locks are held.

    """

    if opts.get('force_lock'):
        repo.svfs.unlink('lock')
    if opts.get('force_wlock'):
        repo.vfs.unlink('wlock')
    if opts.get('force_lock') or opts.get('force_wlock'):
        return 0

    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            l.release()
        else:
            try:
                stat = vfs.lstat(name)
                age = now - stat.st_mtime
                user = util.username(stat.st_uid)
                locker = vfs.readlock(name)
                if ":" in locker:
                    host, pid = locker.split(':')
                    if host == socket.gethostname():
                        locker = 'user %s, process %s' % (user, pid)
                    else:
                        locker = 'user %s, process %s, host %s' \
                                 % (user, pid, host)
                ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
                return 1
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise

        ui.write(("%-6s free\n") % (name + ":"))
        return 0

    held += report(repo.svfs, "lock", repo.lock)
    held += report(repo.vfs, "wlock", repo.wlock)

    return held

@command('debugmergestate', [], '')
def debugmergestate(ui, repo, *args):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""
    def _hashornull(h):
        if h == nullhex:
            return 'null'
        else:
            return h

    def printrecords(version):
        ui.write(('* version %s records\n') % version)
        if version == 1:
            records = v1records
        else:
            records = v2records

        for rtype, record in records:
            # pretty print some record types
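            # record types handled below: 'L'/'O' are the local and other
            # nodes, 'm' is merge driver state, 'F'/'D'/'C' are per-file
            # merge records, 'f' carries per-file extras and 'l' carries
            # the merge labels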
            if rtype == 'L':
                ui.write(('local: %s\n') % record)
            elif rtype == 'O':
                ui.write(('other: %s\n') % record)
            elif rtype == 'm':
                driver, mdstate = record.split('\0', 1)
                ui.write(('merge driver: %s (state "%s")\n')
                         % (driver, mdstate))
            elif rtype in 'FDC':
                r = record.split('\0')
                f, state, hash, lfile, afile, anode, ofile = r[0:7]
                if version == 1:
                    onode = 'not stored in v1 format'
                    flags = r[7]
                else:
                    onode, flags = r[7:9]
                ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
                         % (f, rtype, state, _hashornull(hash)))
                ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
                ui.write((' ancestor path: %s (node %s)\n')
                         % (afile, _hashornull(anode)))
                ui.write((' other path: %s (node %s)\n')
                         % (ofile, _hashornull(onode)))
            elif rtype == 'f':
                filename, rawextras = record.split('\0', 1)
                extras = rawextras.split('\0')
                i = 0
                extrastrings = []
                while i < len(extras):
                    extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
                    i += 2

                ui.write(('file extras: %s (%s)\n')
                         % (filename, ', '.join(extrastrings)))
            elif rtype == 'l':
                labels = record.split('\0', 2)
                labels = [l for l in labels if len(l) > 0]
                ui.write(('labels:\n'))
                ui.write((' local: %s\n' % labels[0]))
                ui.write((' other: %s\n' % labels[1]))
                if len(labels) > 2:
                    ui.write((' base: %s\n' % labels[2]))
            else:
                ui.write(('unrecognized entry: %s\t%s\n')
                         % (rtype, record.replace('\0', '\t')))

    # Avoid mergestate.read() since it may raise an exception for unsupported
    # merge state records. We shouldn't be doing this, but this is OK since this
    # command is pretty low-level.
    ms = mergemod.mergestate(repo)

    # sort so that reasonable information is on top
    v1records = ms._readrecordsv1()
    v2records = ms._readrecordsv2()
    order = 'LOml'
    def key(r):
        idx = order.find(r[0])
        if idx == -1:
            return (1, r[1])
        else:
            return (0, idx)
    v1records.sort(key=key)
    v2records.sort(key=key)

    if not v1records and not v2records:
        ui.write(('no merge state found\n'))
    elif not v2records:
        ui.note(('no version 2 merge state\n'))
        printrecords(1)
    elif ms._v1v2match(v1records, v2records):
        ui.note(('v1 and v2 states match: using v2\n'))
        printrecords(2)
    else:
        ui.note(('v1 and v2 states mismatch: using v1\n'))
        printrecords(1)
        if ui.verbose:
            printrecords(2)

@command('debugnamecomplete', [], _('NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    names = set()
    # since we previously only listed open branches, we will handle that
    # specially (after this for loop)
    for name, ns in repo.names.iteritems():
        if name != 'branches':
            names.update(ns.listnames(repo))
    names.update(tag for (tag, heads, tip, closed)
                 in repo.branchmap().iterbranches() if not closed)
    completions = set()
    if not args:
        args = ['']
    for a in args:
        completions.update(n for n in names if n.startswith(a))
    ui.write('\n'.join(sorted(completions)))
    ui.write('\n')

@command('debugobsolete',
         [('', 'flags', 0, _('markers flag')),
          ('', 'record-parents', False,
           _('record parent information for the precursor')),
          ('r', 'rev', [], _('display markers relevant to REV')),
          ('', 'index', False, _('display index of the marker')),
          ('', 'delete', [], _('delete markers specified by indices')),
         ] + commands.commitopts2 + commands.formatteropts,
         _('[OBSOLETED [REPLACEMENT ...]]'))
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    def parsenodeid(s):
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.Abort('changeset references must be full hexadecimal '
                              'node identifiers')

    if opts.get('delete'):
        indices = []
        for v in opts.get('delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.Abort(_('invalid index value: %r') % v,
                                  hint=_('use integers for indices'))

        if repo.currenttransaction():
            raise error.Abort(_('cannot delete obsmarkers in the middle '
                                'of transaction.'))

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_('deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        if opts['rev']:
            raise error.Abort('cannot select revision when creating marker')
        metadata = {}
        metadata['user'] = opts['user'] or ui.username()
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction('debugobsolete')
            try:
                date = opts.get('date')
                if date:
                    date = util.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts['record_parents']:
                    if prec not in repo.unfiltered():
                        raise error.Abort('cannot use --record-parents on '
                                          'unknown changesets')
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(tr, prec, succs, opts['flags'],
                                     parents=parents, date=date,
                                     metadata=metadata)
                tr.close()
            except ValueError as exc:
                raise error.Abort(_('bad obsmarker input: %s') % exc)
            finally:
                tr.release()
        finally:
            l.release()
    else:
        if opts['rev']:
            revs = scmutil.revrange(repo, opts['rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(obsolete.getmarkers(repo, nodes=nodes))
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsolete.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get('rev') and opts.get('index'):
            markerstoiter = obsolete.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter('debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get('index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()

@command('debugpathcomplete',
         [('f', 'full', None, _('complete an entire path')),
          ('n', 'normal', None, _('show only normal files')),
          ('a', 'added', None, _('show only added files')),
          ('r', 'removed', None, _('show only removed files'))],
         _('FILESPEC...'))
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += '/'
        spec = spec[len(rootdir):]
        fixpaths = pycompat.ossep != '/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, '/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.iteritems():
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace('/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

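    # dirstate status characters: 'n' = normal, 'm' = needs merging,
    # 'a' = added, 'r' = removed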
    acceptable = ''
    if opts['normal']:
        acceptable += 'nm'
    if opts['added']:
        acceptable += 'a'
    if opts['removed']:
        acceptable += 'r'
    cwd = repo.getcwd()
    if not specs:
        specs = ['.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or 'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write('\n')

@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    target = hg.peer(ui, {}, repopath)
    if keyinfo:
        key, old, new = keyinfo
        r = target.pushkey(namespace, key, old, new)
        ui.status(str(r) + '\n')
        return not r
    else:
        for k, v in sorted(target.listkeys(namespace).iteritems()):
            ui.write("%s\t%s\n" % (util.escapestr(k),
                                   util.escapestr(v)))

@command('debugpvec', [], _('A B'))
def debugpvec(ui, repo, a, b=None):
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = "="
    elif pa > pb:
        rel = ">"
    elif pa < pb:
        rel = "<"
    elif pa | pb:
        rel = "|"
    ui.write(_("a: %s\n") % pa)
    ui.write(_("b: %s\n") % pb)
    ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
             (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
              pa.distance(pb), rel))

@command('debugrebuilddirstate|debugrebuildstate',
         [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
          ('', 'minimal', None, _('only rebuild files that are inconsistent with '
                                  'the working copy parent')),
         ],
         _('[-r REV]'))
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            manifestonly = manifestfiles - dirstatefiles
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)

@command('debugrebuildfncache', [], '')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    repair.rebuildfncache(ui, repo)

@command('debugrename',
         [('r', 'rev', '', _('revision to debug'), _('REV'))],
         _('[-r REV] FILE'))
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    ctx = scmutil.revsingle(repo, opts.get('rev'))
    m = scmutil.match(ctx, (file1,) + pats, opts)
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        o = fctx.filelog().renamed(fctx.filenode())
        rel = m.rel(abs)
        if o:
            ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
        else:
            ui.write(_("%s not renamed\n") % rel)

@command('debugrevlog', commands.debugrevlogopts +
    [('d', 'dump', False, _('dump index data'))],
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)

    if opts.get("dump"):
        numrevs = len(r)
        ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
                  " rawsize totalsize compression heads chainlen\n"))
        ts = 0
        heads = set()

        for rev in xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                     "%11d %5d %8d\n" %
                     (rev, p1, p2, r.start(rev), r.end(rev),
                      r.start(dbase), r.start(cbase),
                      r.start(p1), r.start(p2),
                      rs, ts, compression, len(heads), clen))
        return 0

    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.REVLOGNGINLINEDATA:
        flags.append('inline')
    if v & revlog.REVLOGGENERALDELTA:
        gdelta = True
        flags.append('generaldelta')
    if not flags:
        flags = ['(none)']

    nummerges = 0
    numfull = 0
    numprev = 0
    nump1 = 0
    nump2 = 0
    numother = 0
    nump1prev = 0
    nump2prev = 0
    chainlengths = []

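    # each of these size accumulators is a [min, max, total] triple,
    # updated by addsize() below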
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
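    # classify every revision: full snapshot vs delta, and whether the delta
    # base is the previous revision, p1, p2 or some other revision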
    for rev in xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            chainlengths.append(0)
            numfull += 1
            addsize(size, fullsize)
        else:
            chainlengths.append(chainlengths[delta] + 1)
            addsize(size, deltasize)
            if delta == rev - 1:
                numprev += 1
                if delta == p1:
                    nump1prev += 1
                elif delta == p2:
                    nump2prev += 1
            elif delta == p1:
                nump1 += 1
            elif delta == p2:
                nump2 += 1
            elif delta != nullrev:
                numother += 1

        # Obtain data on the raw chunks in the revlog.
        chunk = r._chunkraw(rev, rev)[1]
        if chunk:
            chunktype = chunk[0]
        else:
            chunktype = 'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    fullsize[2] /= numfull
    deltatotal = deltasize[2]
    if numrevs - numfull > 0:
        deltasize[2] /= numrevs - numfull
    totalsize = fulltotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    basedfmtstr = '%%%dd\n'
    basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        return basedfmtstr % len(str(max))
    def pcfmtstr(max, padding=0):
        return basepcfmtstr % (len(str(max)), ' ' * padding)

    def pcfmt(value, total):
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0
1752
1755
    ui.write(('format : %d\n') % format)
    ui.write(('flags : %s\n') % ', '.join(flags))

    ui.write('\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
    ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
    ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
    ui.write(('revision size : ') + fmt2 % totalsize)
    ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
    ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        if chunktype == 'empty':
            return ' %s : ' % chunktype
        elif chunktype in string.ascii_letters:
            return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return ' 0x%s : ' % hex(chunktype)

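    # For example (hypothetical input): fmtchunktype('x') returns
    # ' 0x78 (x) : ' since 'x' is an ASCII letter, while a non-letter marker
    # byte such as '\x00' is shown only in hex form, as ' 0x00 : '.
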
    ui.write('\n')
    ui.write(('chunks : ') + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.write(('chunks size : ') + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write('\n')
    fmt = dfmtstr(max(avgchainlen, compratio))
    ui.write(('avg chain length : ') + fmt % avgchainlen)
    ui.write(('max chain length : ') + fmt % maxchainlen)
    ui.write(('compression ratio : ') + fmt % compratio)

    if format > 0:
        ui.write('\n')
        ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
                 % tuple(datasize))
        ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
                 % tuple(fullsize))
        ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
                 % tuple(deltasize))

    if numdeltas > 0:
        ui.write('\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
        if numprev > 0:
            ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
                                                            numprev))
            ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
                                                            numprev))
            ui.write((' other : ') + fmt2 % pcfmt(numoprev,
                                                  numprev))
        if gdelta:
            ui.write(('deltas against p1 : ')
                     + fmt % pcfmt(nump1, numdeltas))
            ui.write(('deltas against p2 : ')
                     + fmt % pcfmt(nump2, numdeltas))
            ui.write(('deltas against other : ') + fmt % pcfmt(numother,
                                                               numdeltas))

@command('debugrevspec',
         [('', 'optimize', None,
           _('print parsed tree after optimizing (DEPRECATED)')),
          ('p', 'show-stage', [],
           _('print parsed tree at the given stage'), _('NAME')),
          ('', 'no-optimized', False, _('evaluate tree without optimization')),
          ('', 'verify-optimized', False, _('verify optimized result')),
          ],
         ('REVSPEC'))
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    stages = [
        ('parsed', lambda tree: tree),
        ('expanded', lambda tree: revsetlang.expandaliases(ui, tree)),
        ('concatenated', revsetlang.foldconcat),
        ('analyzed', revsetlang.analyze),
        ('optimized', revsetlang.optimize),
    ]
    if opts['no_optimized']:
        stages = stages[:-1]
    if opts['verify_optimized'] and opts['no_optimized']:
        raise error.Abort(_('cannot use --verify-optimized with '
                            '--no-optimized'))
    stagenames = set(n for n, f in stages)

    showalways = set()
    showchanged = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
        showchanged.update(['expanded', 'concatenated'])
    if opts['optimize']:
        showalways.add('optimized')
    if opts['show_stage'] and opts['optimize']:
        raise error.Abort(_('cannot use --optimize with --show-stage'))
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=repo.__contains__)
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(revsetlang.prettyformat(tree), "\n")
            printedtree = tree

    if opts['verify_optimized']:
        arevs = revset.makematcher(treebystage['analyzed'])(repo)
        brevs = revset.makematcher(treebystage['optimized'])(repo)
        if ui.verbose:
            ui.note(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
            ui.note(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        ui.write(('--- analyzed\n'), label='diff.file_a')
        ui.write(('+++ optimized\n'), label='diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write('-%s\n' % c, label='diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write('+%s\n' % c, label='diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(' %s\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if ui.verbose:
        ui.note(("* set:\n"), smartset.prettyformat(revs), "\n")
    for c in revs:
        ui.write("%s\n" % c)

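# Illustrative usage of debugrevspec (the revset below is made up; any valid
# REVSPEC works):
#
#   $ hg debugrevspec -p all 'draft() and user(alice)'
#
# prints the tree after each stage named above (parsed, expanded,
# concatenated, analyzed, optimized) followed by the matching revisions, and
#
#   $ hg debugrevspec --verify-optimized 'heads(all())'
#
# exits with status 1 if the optimized evaluation differs from the analyzed
# one.
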
@command('debugsetparents', [], _('REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """

    r1 = scmutil.revsingle(repo, rev1).node()
    r2 = scmutil.revsingle(repo, rev2, 'null').node()

    with repo.wlock():
        repo.setparents(r1, r2)

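# Illustrative usage of debugsetparents (the revision numbers are hypothetical):
#
#   $ hg debugsetparents 3 7
#
# makes the working directory claim revisions 3 and 7 as its parents without
# updating any files or the dirstate; with a single argument, the second
# parent is reset to the null revision.
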
@command('debugsub',
         [('r', 'rev', '',
           _('revision to check'), _('REV'))],
         _('[-r REV] [REV]'))
def debugsub(ui, repo, rev=None):
    # Print, for each subrepository recorded in the given revision (or the
    # working directory when no revision is given), its path along with the
    # source URL and revision pinned in the substate.
    ctx = scmutil.revsingle(repo, rev, None)
    for k, v in sorted(ctx.substate.items()):
        ui.write(('path %s\n') % k)
        ui.write((' source %s\n') % v[0])
        ui.write((' revision %s\n') % v[1])

@command('debugsuccessorssets',
         [],
         _('[REV]'))
def debugsuccessorssets(ui, repo, *revs):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors is called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    ctx2str = str
    node2str = short
    if ui.debug():
        def ctx2str(ctx):
            return ctx.hex()
        node2str = hex
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write('%s\n'% ctx2str(ctx))
        for succsset in obsolete.successorssets(repo, ctx.node(), cache):
            if succsset:
                ui.write(' ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(' ')
                    ui.write(node2str(node))
            ui.write('\n')

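# Illustrative usage of debugsuccessorssets (only meaningful once obsolescence
# markers exist, e.g. with the experimental evolution feature; the revset is
# just an example):
#
#   $ hg debugsuccessorssets 'obsolete()'
#
# prints each requested revision followed by one indented line per successors
# set, in the format described in the docstring above.
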
@command('debugtemplate',
    [('r', 'rev', [], _('apply template on changesets'), _('REV')),
     ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
    _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        if repo is None:
            raise error.RepoError(_('there is no Mercurial repository here '
                                    '(.hg not found)'))
        revs = scmutil.revrange(repo, opts['rev'])

    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split('=', 1))
            if not k or k == 'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_('malformed keyword definition: %s') % d)

    if ui.verbose:
        aliases = ui.configitems('templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), '\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')

    mapfile = None
    if revs is None:
        k = 'debugtemplate'
        t = formatter.maketemplater(ui, k, tmpl)
        ui.write(templater.stringify(t(k, ui=ui, **props)))
    else:
        displayer = cmdutil.changeset_templater(ui, repo, None, opts, tmpl,
                                                mapfile, buffered=False)
        for r in revs:
            displayer.show(repo[r], **props)
        displayer.close()

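# Illustrative usage of debugtemplate (the 'owner' keyword and its value are
# made up for the example):
#
#   $ hg debugtemplate -r . -D owner=alice '{node|short} {owner}\n'
#
# renders the template as a log template against the working directory parent,
# with the extra keyword supplied via -D.
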
@command('debugupgraderepo', [
    ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
    ('', 'run', False, _('performs an upgrade')),
])
def debugupgraderepo(ui, repo, run=False, optimize=None):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.
    """
    return repair.upgraderepo(ui, repo, run=run, optimize=optimize)

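# Usage note: running 'hg debugupgraderepo' with no arguments only reports what
# an upgrade would change; add --run to actually perform it, and -o/--optimize
# NAME for extra optimizations, where the valid NAMEs are the ones listed in
# that report rather than repeated here.
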
@command('debugwalk', commands.walkopts, _('[OPTION]... [FILE]...'),
         inferrepo=True)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    m = scmutil.match(repo[None], pats, opts)
    items = list(repo.walk(m))
    if not items:
        return
    f = lambda fn: fn
    if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
        f = lambda fn: util.normpath(fn)
    fmt = 'f %%-%ds %%-%ds %%s' % (
        max([len(abs) for abs in items]),
        max([len(m.rel(abs)) for abs in items]))
    for abs in items:
        line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
        ui.write("%s\n" % line.rstrip())

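# Illustrative usage of debugwalk (the pattern is hypothetical):
#
#   $ hg debugwalk 'glob:**.py'
#
# emits one 'f <absolute-path> <relative-path> [exact]' line per matching
# file, following the fmt string built above.
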
@command('debugwireargs',
    [('', 'three', '', 'three'),
     ('', 'four', '', 'four'),
     ('', 'five', '', 'five'),
    ] + commands.remoteopts,
    _('REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True)
def debugwireargs(ui, repopath, *vals, **opts):
    repo = hg.peer(ui, opts, repopath)
    for opt in commands.remoteopts:
        del opts[opt[1]]
    args = {}
    for k, v in opts.iteritems():
        if v:
            args[k] = v
    # run twice to check that we don't mess up the stream for the next command
    res1 = repo.debugwireargs(*vals, **args)
    res2 = repo.debugwireargs(*vals, **args)
    ui.write("%s\n" % res1)
    if res1 != res2:
        ui.warn("%s\n" % res2)