vfs: use 'vfs' module directly in 'mercurial.debugcommand'...
Pierre-Yves David
r31239:9cdba607 default
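The change itself is small: debugcommands.py adds a direct import of Mercurial's vfs module ('vfs as vfsmod' in the import block) and calls vfsmod.vfs(...) where it previously went through scmutil.vfs(...). A minimal sketch of the before/after call site, assuming a Mercurial 4.1-era source tree where mercurial/vfs.py already exists; the index path below is a hypothetical example, not taken from the changeset:

    # Sketch only: shows the call-site change made by this changeset.
    from mercurial import pycompat, revlog, scmutil
    from mercurial import vfs as vfsmod

    index_path = '.hg/store/00changelog.i'  # hypothetical revlog index path

    # Before: the vfs class is reached through scmutil's re-export.
    r_old = revlog.revlog(scmutil.vfs(pycompat.getcwd(), audit=False), index_path)

    # After: the vfs module is imported directly and used as vfsmod.
    r_new = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index_path)

Both forms construct the same non-auditing vfs rooted at the current working directory; the commit only drops the indirection through scmutil, as the diff below shows.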
@@ -1,2110 +1,2111 @@
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import difflib
10 import difflib
11 import errno
11 import errno
12 import operator
12 import operator
13 import os
13 import os
14 import random
14 import random
15 import socket
15 import socket
16 import string
16 import string
17 import sys
17 import sys
18 import tempfile
18 import tempfile
19 import time
19 import time
20
20
21 from .i18n import _
21 from .i18n import _
22 from .node import (
22 from .node import (
23 bin,
23 bin,
24 hex,
24 hex,
25 nullhex,
25 nullhex,
26 nullid,
26 nullid,
27 nullrev,
27 nullrev,
28 short,
28 short,
29 )
29 )
30 from . import (
30 from . import (
31 bundle2,
31 bundle2,
32 changegroup,
32 changegroup,
33 cmdutil,
33 cmdutil,
34 color,
34 color,
35 commands,
35 commands,
36 context,
36 context,
37 dagparser,
37 dagparser,
38 dagutil,
38 dagutil,
39 encoding,
39 encoding,
40 error,
40 error,
41 exchange,
41 exchange,
42 extensions,
42 extensions,
43 fileset,
43 fileset,
44 formatter,
44 formatter,
45 hg,
45 hg,
46 localrepo,
46 localrepo,
47 lock as lockmod,
47 lock as lockmod,
48 merge as mergemod,
48 merge as mergemod,
49 obsolete,
49 obsolete,
50 policy,
50 policy,
51 pvec,
51 pvec,
52 pycompat,
52 pycompat,
53 repair,
53 repair,
54 revlog,
54 revlog,
55 revset,
55 revset,
56 revsetlang,
56 revsetlang,
57 scmutil,
57 scmutil,
58 setdiscovery,
58 setdiscovery,
59 simplemerge,
59 simplemerge,
60 smartset,
60 smartset,
61 sslutil,
61 sslutil,
62 streamclone,
62 streamclone,
63 templater,
63 templater,
64 treediscovery,
64 treediscovery,
65 util,
65 util,
66 vfs as vfsmod,
66 )
67 )
67
68
68 release = lockmod.release
69 release = lockmod.release
69
70
70 # We reuse the command table from commands because it is easier than
71 # We reuse the command table from commands because it is easier than
71 # teaching dispatch about multiple tables.
72 # teaching dispatch about multiple tables.
72 command = cmdutil.command(commands.table)
73 command = cmdutil.command(commands.table)
73
74
74 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
75 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
75 def debugancestor(ui, repo, *args):
76 def debugancestor(ui, repo, *args):
76 """find the ancestor revision of two revisions in a given index"""
77 """find the ancestor revision of two revisions in a given index"""
77 if len(args) == 3:
78 if len(args) == 3:
78 index, rev1, rev2 = args
79 index, rev1, rev2 = args
79 r = revlog.revlog(scmutil.vfs(pycompat.getcwd(), audit=False), index)
80 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
80 lookup = r.lookup
81 lookup = r.lookup
81 elif len(args) == 2:
82 elif len(args) == 2:
82 if not repo:
83 if not repo:
83 raise error.Abort(_('there is no Mercurial repository here '
84 raise error.Abort(_('there is no Mercurial repository here '
84 '(.hg not found)'))
85 '(.hg not found)'))
85 rev1, rev2 = args
86 rev1, rev2 = args
86 r = repo.changelog
87 r = repo.changelog
87 lookup = repo.lookup
88 lookup = repo.lookup
88 else:
89 else:
89 raise error.Abort(_('either two or three arguments required'))
90 raise error.Abort(_('either two or three arguments required'))
90 a = r.ancestor(lookup(rev1), lookup(rev2))
91 a = r.ancestor(lookup(rev1), lookup(rev2))
91 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
92 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
92
93
93 @command('debugapplystreamclonebundle', [], 'FILE')
94 @command('debugapplystreamclonebundle', [], 'FILE')
94 def debugapplystreamclonebundle(ui, repo, fname):
95 def debugapplystreamclonebundle(ui, repo, fname):
95 """apply a stream clone bundle file"""
96 """apply a stream clone bundle file"""
96 f = hg.openpath(ui, fname)
97 f = hg.openpath(ui, fname)
97 gen = exchange.readbundle(ui, f, fname)
98 gen = exchange.readbundle(ui, f, fname)
98 gen.apply(repo)
99 gen.apply(repo)
99
100
100 @command('debugbuilddag',
101 @command('debugbuilddag',
101 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
102 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
102 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
103 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
103 ('n', 'new-file', None, _('add new file at each rev'))],
104 ('n', 'new-file', None, _('add new file at each rev'))],
104 _('[OPTION]... [TEXT]'))
105 _('[OPTION]... [TEXT]'))
105 def debugbuilddag(ui, repo, text=None,
106 def debugbuilddag(ui, repo, text=None,
106 mergeable_file=False,
107 mergeable_file=False,
107 overwritten_file=False,
108 overwritten_file=False,
108 new_file=False):
109 new_file=False):
109 """builds a repo with a given DAG from scratch in the current empty repo
110 """builds a repo with a given DAG from scratch in the current empty repo
110
111
111 The description of the DAG is read from stdin if not given on the
112 The description of the DAG is read from stdin if not given on the
112 command line.
113 command line.
113
114
114 Elements:
115 Elements:
115
116
116 - "+n" is a linear run of n nodes based on the current default parent
117 - "+n" is a linear run of n nodes based on the current default parent
117 - "." is a single node based on the current default parent
118 - "." is a single node based on the current default parent
118 - "$" resets the default parent to null (implied at the start);
119 - "$" resets the default parent to null (implied at the start);
119 otherwise the default parent is always the last node created
120 otherwise the default parent is always the last node created
120 - "<p" sets the default parent to the backref p
121 - "<p" sets the default parent to the backref p
121 - "*p" is a fork at parent p, which is a backref
122 - "*p" is a fork at parent p, which is a backref
122 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
123 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
123 - "/p2" is a merge of the preceding node and p2
124 - "/p2" is a merge of the preceding node and p2
124 - ":tag" defines a local tag for the preceding node
125 - ":tag" defines a local tag for the preceding node
125 - "@branch" sets the named branch for subsequent nodes
126 - "@branch" sets the named branch for subsequent nodes
126 - "#...\\n" is a comment up to the end of the line
127 - "#...\\n" is a comment up to the end of the line
127
128
128 Whitespace between the above elements is ignored.
129 Whitespace between the above elements is ignored.
129
130
130 A backref is either
131 A backref is either
131
132
132 - a number n, which references the node curr-n, where curr is the current
133 - a number n, which references the node curr-n, where curr is the current
133 node, or
134 node, or
134 - the name of a local tag you placed earlier using ":tag", or
135 - the name of a local tag you placed earlier using ":tag", or
135 - empty to denote the default parent.
136 - empty to denote the default parent.
136
137
137 All string valued-elements are either strictly alphanumeric, or must
138 All string valued-elements are either strictly alphanumeric, or must
138 be enclosed in double quotes ("..."), with "\\" as escape character.
139 be enclosed in double quotes ("..."), with "\\" as escape character.
139 """
140 """
140
141
141 if text is None:
142 if text is None:
142 ui.status(_("reading DAG from stdin\n"))
143 ui.status(_("reading DAG from stdin\n"))
143 text = ui.fin.read()
144 text = ui.fin.read()
144
145
145 cl = repo.changelog
146 cl = repo.changelog
146 if len(cl) > 0:
147 if len(cl) > 0:
147 raise error.Abort(_('repository is not empty'))
148 raise error.Abort(_('repository is not empty'))
148
149
149 # determine number of revs in DAG
150 # determine number of revs in DAG
150 total = 0
151 total = 0
151 for type, data in dagparser.parsedag(text):
152 for type, data in dagparser.parsedag(text):
152 if type == 'n':
153 if type == 'n':
153 total += 1
154 total += 1
154
155
155 if mergeable_file:
156 if mergeable_file:
156 linesperrev = 2
157 linesperrev = 2
157 # make a file with k lines per rev
158 # make a file with k lines per rev
158 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
159 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
159 initialmergedlines.append("")
160 initialmergedlines.append("")
160
161
161 tags = []
162 tags = []
162
163
163 wlock = lock = tr = None
164 wlock = lock = tr = None
164 try:
165 try:
165 wlock = repo.wlock()
166 wlock = repo.wlock()
166 lock = repo.lock()
167 lock = repo.lock()
167 tr = repo.transaction("builddag")
168 tr = repo.transaction("builddag")
168
169
169 at = -1
170 at = -1
170 atbranch = 'default'
171 atbranch = 'default'
171 nodeids = []
172 nodeids = []
172 id = 0
173 id = 0
173 ui.progress(_('building'), id, unit=_('revisions'), total=total)
174 ui.progress(_('building'), id, unit=_('revisions'), total=total)
174 for type, data in dagparser.parsedag(text):
175 for type, data in dagparser.parsedag(text):
175 if type == 'n':
176 if type == 'n':
176 ui.note(('node %s\n' % str(data)))
177 ui.note(('node %s\n' % str(data)))
177 id, ps = data
178 id, ps = data
178
179
179 files = []
180 files = []
180 fctxs = {}
181 fctxs = {}
181
182
182 p2 = None
183 p2 = None
183 if mergeable_file:
184 if mergeable_file:
184 fn = "mf"
185 fn = "mf"
185 p1 = repo[ps[0]]
186 p1 = repo[ps[0]]
186 if len(ps) > 1:
187 if len(ps) > 1:
187 p2 = repo[ps[1]]
188 p2 = repo[ps[1]]
188 pa = p1.ancestor(p2)
189 pa = p1.ancestor(p2)
189 base, local, other = [x[fn].data() for x in (pa, p1,
190 base, local, other = [x[fn].data() for x in (pa, p1,
190 p2)]
191 p2)]
191 m3 = simplemerge.Merge3Text(base, local, other)
192 m3 = simplemerge.Merge3Text(base, local, other)
192 ml = [l.strip() for l in m3.merge_lines()]
193 ml = [l.strip() for l in m3.merge_lines()]
193 ml.append("")
194 ml.append("")
194 elif at > 0:
195 elif at > 0:
195 ml = p1[fn].data().split("\n")
196 ml = p1[fn].data().split("\n")
196 else:
197 else:
197 ml = initialmergedlines
198 ml = initialmergedlines
198 ml[id * linesperrev] += " r%i" % id
199 ml[id * linesperrev] += " r%i" % id
199 mergedtext = "\n".join(ml)
200 mergedtext = "\n".join(ml)
200 files.append(fn)
201 files.append(fn)
201 fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
202 fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
202
203
203 if overwritten_file:
204 if overwritten_file:
204 fn = "of"
205 fn = "of"
205 files.append(fn)
206 files.append(fn)
206 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
207 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
207
208
208 if new_file:
209 if new_file:
209 fn = "nf%i" % id
210 fn = "nf%i" % id
210 files.append(fn)
211 files.append(fn)
211 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
212 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
212 if len(ps) > 1:
213 if len(ps) > 1:
213 if not p2:
214 if not p2:
214 p2 = repo[ps[1]]
215 p2 = repo[ps[1]]
215 for fn in p2:
216 for fn in p2:
216 if fn.startswith("nf"):
217 if fn.startswith("nf"):
217 files.append(fn)
218 files.append(fn)
218 fctxs[fn] = p2[fn]
219 fctxs[fn] = p2[fn]
219
220
220 def fctxfn(repo, cx, path):
221 def fctxfn(repo, cx, path):
221 return fctxs.get(path)
222 return fctxs.get(path)
222
223
223 if len(ps) == 0 or ps[0] < 0:
224 if len(ps) == 0 or ps[0] < 0:
224 pars = [None, None]
225 pars = [None, None]
225 elif len(ps) == 1:
226 elif len(ps) == 1:
226 pars = [nodeids[ps[0]], None]
227 pars = [nodeids[ps[0]], None]
227 else:
228 else:
228 pars = [nodeids[p] for p in ps]
229 pars = [nodeids[p] for p in ps]
229 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
230 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
230 date=(id, 0),
231 date=(id, 0),
231 user="debugbuilddag",
232 user="debugbuilddag",
232 extra={'branch': atbranch})
233 extra={'branch': atbranch})
233 nodeid = repo.commitctx(cx)
234 nodeid = repo.commitctx(cx)
234 nodeids.append(nodeid)
235 nodeids.append(nodeid)
235 at = id
236 at = id
236 elif type == 'l':
237 elif type == 'l':
237 id, name = data
238 id, name = data
238 ui.note(('tag %s\n' % name))
239 ui.note(('tag %s\n' % name))
239 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
240 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
240 elif type == 'a':
241 elif type == 'a':
241 ui.note(('branch %s\n' % data))
242 ui.note(('branch %s\n' % data))
242 atbranch = data
243 atbranch = data
243 ui.progress(_('building'), id, unit=_('revisions'), total=total)
244 ui.progress(_('building'), id, unit=_('revisions'), total=total)
244 tr.close()
245 tr.close()
245
246
246 if tags:
247 if tags:
247 repo.vfs.write("localtags", "".join(tags))
248 repo.vfs.write("localtags", "".join(tags))
248 finally:
249 finally:
249 ui.progress(_('building'), None)
250 ui.progress(_('building'), None)
250 release(tr, lock, wlock)
251 release(tr, lock, wlock)
251
252
252 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
253 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
253 indent_string = ' ' * indent
254 indent_string = ' ' * indent
254 if all:
255 if all:
255 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
256 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
256 % indent_string)
257 % indent_string)
257
258
258 def showchunks(named):
259 def showchunks(named):
259 ui.write("\n%s%s\n" % (indent_string, named))
260 ui.write("\n%s%s\n" % (indent_string, named))
260 chain = None
261 chain = None
261 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
262 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
262 node = chunkdata['node']
263 node = chunkdata['node']
263 p1 = chunkdata['p1']
264 p1 = chunkdata['p1']
264 p2 = chunkdata['p2']
265 p2 = chunkdata['p2']
265 cs = chunkdata['cs']
266 cs = chunkdata['cs']
266 deltabase = chunkdata['deltabase']
267 deltabase = chunkdata['deltabase']
267 delta = chunkdata['delta']
268 delta = chunkdata['delta']
268 ui.write("%s%s %s %s %s %s %s\n" %
269 ui.write("%s%s %s %s %s %s %s\n" %
269 (indent_string, hex(node), hex(p1), hex(p2),
270 (indent_string, hex(node), hex(p1), hex(p2),
270 hex(cs), hex(deltabase), len(delta)))
271 hex(cs), hex(deltabase), len(delta)))
271 chain = node
272 chain = node
272
273
273 chunkdata = gen.changelogheader()
274 chunkdata = gen.changelogheader()
274 showchunks("changelog")
275 showchunks("changelog")
275 chunkdata = gen.manifestheader()
276 chunkdata = gen.manifestheader()
276 showchunks("manifest")
277 showchunks("manifest")
277 for chunkdata in iter(gen.filelogheader, {}):
278 for chunkdata in iter(gen.filelogheader, {}):
278 fname = chunkdata['filename']
279 fname = chunkdata['filename']
279 showchunks(fname)
280 showchunks(fname)
280 else:
281 else:
281 if isinstance(gen, bundle2.unbundle20):
282 if isinstance(gen, bundle2.unbundle20):
282 raise error.Abort(_('use debugbundle2 for this file'))
283 raise error.Abort(_('use debugbundle2 for this file'))
283 chunkdata = gen.changelogheader()
284 chunkdata = gen.changelogheader()
284 chain = None
285 chain = None
285 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
286 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
286 node = chunkdata['node']
287 node = chunkdata['node']
287 ui.write("%s%s\n" % (indent_string, hex(node)))
288 ui.write("%s%s\n" % (indent_string, hex(node)))
288 chain = node
289 chain = node
289
290
290 def _debugbundle2(ui, gen, all=None, **opts):
291 def _debugbundle2(ui, gen, all=None, **opts):
291 """lists the contents of a bundle2"""
292 """lists the contents of a bundle2"""
292 if not isinstance(gen, bundle2.unbundle20):
293 if not isinstance(gen, bundle2.unbundle20):
293 raise error.Abort(_('not a bundle2 file'))
294 raise error.Abort(_('not a bundle2 file'))
294 ui.write(('Stream params: %s\n' % repr(gen.params)))
295 ui.write(('Stream params: %s\n' % repr(gen.params)))
295 for part in gen.iterparts():
296 for part in gen.iterparts():
296 ui.write('%s -- %r\n' % (part.type, repr(part.params)))
297 ui.write('%s -- %r\n' % (part.type, repr(part.params)))
297 if part.type == 'changegroup':
298 if part.type == 'changegroup':
298 version = part.params.get('version', '01')
299 version = part.params.get('version', '01')
299 cg = changegroup.getunbundler(version, part, 'UN')
300 cg = changegroup.getunbundler(version, part, 'UN')
300 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
301 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
301
302
302 @command('debugbundle',
303 @command('debugbundle',
303 [('a', 'all', None, _('show all details')),
304 [('a', 'all', None, _('show all details')),
304 ('', 'spec', None, _('print the bundlespec of the bundle'))],
305 ('', 'spec', None, _('print the bundlespec of the bundle'))],
305 _('FILE'),
306 _('FILE'),
306 norepo=True)
307 norepo=True)
307 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
308 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
308 """lists the contents of a bundle"""
309 """lists the contents of a bundle"""
309 with hg.openpath(ui, bundlepath) as f:
310 with hg.openpath(ui, bundlepath) as f:
310 if spec:
311 if spec:
311 spec = exchange.getbundlespec(ui, f)
312 spec = exchange.getbundlespec(ui, f)
312 ui.write('%s\n' % spec)
313 ui.write('%s\n' % spec)
313 return
314 return
314
315
315 gen = exchange.readbundle(ui, f, bundlepath)
316 gen = exchange.readbundle(ui, f, bundlepath)
316 if isinstance(gen, bundle2.unbundle20):
317 if isinstance(gen, bundle2.unbundle20):
317 return _debugbundle2(ui, gen, all=all, **opts)
318 return _debugbundle2(ui, gen, all=all, **opts)
318 _debugchangegroup(ui, gen, all=all, **opts)
319 _debugchangegroup(ui, gen, all=all, **opts)
319
320
320 @command('debugcheckstate', [], '')
321 @command('debugcheckstate', [], '')
321 def debugcheckstate(ui, repo):
322 def debugcheckstate(ui, repo):
322 """validate the correctness of the current dirstate"""
323 """validate the correctness of the current dirstate"""
323 parent1, parent2 = repo.dirstate.parents()
324 parent1, parent2 = repo.dirstate.parents()
324 m1 = repo[parent1].manifest()
325 m1 = repo[parent1].manifest()
325 m2 = repo[parent2].manifest()
326 m2 = repo[parent2].manifest()
326 errors = 0
327 errors = 0
327 for f in repo.dirstate:
328 for f in repo.dirstate:
328 state = repo.dirstate[f]
329 state = repo.dirstate[f]
329 if state in "nr" and f not in m1:
330 if state in "nr" and f not in m1:
330 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
331 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
331 errors += 1
332 errors += 1
332 if state in "a" and f in m1:
333 if state in "a" and f in m1:
333 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
334 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
334 errors += 1
335 errors += 1
335 if state in "m" and f not in m1 and f not in m2:
336 if state in "m" and f not in m1 and f not in m2:
336 ui.warn(_("%s in state %s, but not in either manifest\n") %
337 ui.warn(_("%s in state %s, but not in either manifest\n") %
337 (f, state))
338 (f, state))
338 errors += 1
339 errors += 1
339 for f in m1:
340 for f in m1:
340 state = repo.dirstate[f]
341 state = repo.dirstate[f]
341 if state not in "nrm":
342 if state not in "nrm":
342 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
343 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
343 errors += 1
344 errors += 1
344 if errors:
345 if errors:
345 error = _(".hg/dirstate inconsistent with current parent's manifest")
346 error = _(".hg/dirstate inconsistent with current parent's manifest")
346 raise error.Abort(error)
347 raise error.Abort(error)
347
348
348 @command('debugcolor',
349 @command('debugcolor',
349 [('', 'style', None, _('show all configured styles'))],
350 [('', 'style', None, _('show all configured styles'))],
350 'hg debugcolor')
351 'hg debugcolor')
351 def debugcolor(ui, repo, **opts):
352 def debugcolor(ui, repo, **opts):
352 """show available color, effects or style"""
353 """show available color, effects or style"""
353 ui.write(('color mode: %s\n') % ui._colormode)
354 ui.write(('color mode: %s\n') % ui._colormode)
354 if opts.get('style'):
355 if opts.get('style'):
355 return _debugdisplaystyle(ui)
356 return _debugdisplaystyle(ui)
356 else:
357 else:
357 return _debugdisplaycolor(ui)
358 return _debugdisplaycolor(ui)
358
359
359 def _debugdisplaycolor(ui):
360 def _debugdisplaycolor(ui):
360 ui = ui.copy()
361 ui = ui.copy()
361 ui._styles.clear()
362 ui._styles.clear()
362 for effect in color._effects.keys():
363 for effect in color._effects.keys():
363 ui._styles[effect] = effect
364 ui._styles[effect] = effect
364 if ui._terminfoparams:
365 if ui._terminfoparams:
365 for k, v in ui.configitems('color'):
366 for k, v in ui.configitems('color'):
366 if k.startswith('color.'):
367 if k.startswith('color.'):
367 ui._styles[k] = k[6:]
368 ui._styles[k] = k[6:]
368 elif k.startswith('terminfo.'):
369 elif k.startswith('terminfo.'):
369 ui._styles[k] = k[9:]
370 ui._styles[k] = k[9:]
370 ui.write(_('available colors:\n'))
371 ui.write(_('available colors:\n'))
371 # sort label with a '_' after the other to group '_background' entry.
372 # sort label with a '_' after the other to group '_background' entry.
372 items = sorted(ui._styles.items(),
373 items = sorted(ui._styles.items(),
373 key=lambda i: ('_' in i[0], i[0], i[1]))
374 key=lambda i: ('_' in i[0], i[0], i[1]))
374 for colorname, label in items:
375 for colorname, label in items:
375 ui.write(('%s\n') % colorname, label=label)
376 ui.write(('%s\n') % colorname, label=label)
376
377
377 def _debugdisplaystyle(ui):
378 def _debugdisplaystyle(ui):
378 ui.write(_('available style:\n'))
379 ui.write(_('available style:\n'))
379 width = max(len(s) for s in ui._styles)
380 width = max(len(s) for s in ui._styles)
380 for label, effects in sorted(ui._styles.items()):
381 for label, effects in sorted(ui._styles.items()):
381 ui.write('%s' % label, label=label)
382 ui.write('%s' % label, label=label)
382 if effects:
383 if effects:
383 # 50
384 # 50
384 ui.write(': ')
385 ui.write(': ')
385 ui.write(' ' * (max(0, width - len(label))))
386 ui.write(' ' * (max(0, width - len(label))))
386 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
387 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
387 ui.write('\n')
388 ui.write('\n')
388
389
389 @command('debugcommands', [], _('[COMMAND]'), norepo=True)
390 @command('debugcommands', [], _('[COMMAND]'), norepo=True)
390 def debugcommands(ui, cmd='', *args):
391 def debugcommands(ui, cmd='', *args):
391 """list all available commands and options"""
392 """list all available commands and options"""
392 for cmd, vals in sorted(commands.table.iteritems()):
393 for cmd, vals in sorted(commands.table.iteritems()):
393 cmd = cmd.split('|')[0].strip('^')
394 cmd = cmd.split('|')[0].strip('^')
394 opts = ', '.join([i[1] for i in vals[1]])
395 opts = ', '.join([i[1] for i in vals[1]])
395 ui.write('%s: %s\n' % (cmd, opts))
396 ui.write('%s: %s\n' % (cmd, opts))
396
397
397 @command('debugcomplete',
398 @command('debugcomplete',
398 [('o', 'options', None, _('show the command options'))],
399 [('o', 'options', None, _('show the command options'))],
399 _('[-o] CMD'),
400 _('[-o] CMD'),
400 norepo=True)
401 norepo=True)
401 def debugcomplete(ui, cmd='', **opts):
402 def debugcomplete(ui, cmd='', **opts):
402 """returns the completion list associated with the given command"""
403 """returns the completion list associated with the given command"""
403
404
404 if opts.get('options'):
405 if opts.get('options'):
405 options = []
406 options = []
406 otables = [commands.globalopts]
407 otables = [commands.globalopts]
407 if cmd:
408 if cmd:
408 aliases, entry = cmdutil.findcmd(cmd, commands.table, False)
409 aliases, entry = cmdutil.findcmd(cmd, commands.table, False)
409 otables.append(entry[1])
410 otables.append(entry[1])
410 for t in otables:
411 for t in otables:
411 for o in t:
412 for o in t:
412 if "(DEPRECATED)" in o[3]:
413 if "(DEPRECATED)" in o[3]:
413 continue
414 continue
414 if o[0]:
415 if o[0]:
415 options.append('-%s' % o[0])
416 options.append('-%s' % o[0])
416 options.append('--%s' % o[1])
417 options.append('--%s' % o[1])
417 ui.write("%s\n" % "\n".join(options))
418 ui.write("%s\n" % "\n".join(options))
418 return
419 return
419
420
420 cmdlist, unused_allcmds = cmdutil.findpossible(cmd, commands.table)
421 cmdlist, unused_allcmds = cmdutil.findpossible(cmd, commands.table)
421 if ui.verbose:
422 if ui.verbose:
422 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
423 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
423 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
424 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
424
425
425 @command('debugcreatestreamclonebundle', [], 'FILE')
426 @command('debugcreatestreamclonebundle', [], 'FILE')
426 def debugcreatestreamclonebundle(ui, repo, fname):
427 def debugcreatestreamclonebundle(ui, repo, fname):
427 """create a stream clone bundle file
428 """create a stream clone bundle file
428
429
429 Stream bundles are special bundles that are essentially archives of
430 Stream bundles are special bundles that are essentially archives of
430 revlog files. They are commonly used for cloning very quickly.
431 revlog files. They are commonly used for cloning very quickly.
431 """
432 """
432 requirements, gen = streamclone.generatebundlev1(repo)
433 requirements, gen = streamclone.generatebundlev1(repo)
433 changegroup.writechunks(ui, gen, fname)
434 changegroup.writechunks(ui, gen, fname)
434
435
435 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
436 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
436
437
437 @command('debugdag',
438 @command('debugdag',
438 [('t', 'tags', None, _('use tags as labels')),
439 [('t', 'tags', None, _('use tags as labels')),
439 ('b', 'branches', None, _('annotate with branch names')),
440 ('b', 'branches', None, _('annotate with branch names')),
440 ('', 'dots', None, _('use dots for runs')),
441 ('', 'dots', None, _('use dots for runs')),
441 ('s', 'spaces', None, _('separate elements by spaces'))],
442 ('s', 'spaces', None, _('separate elements by spaces'))],
442 _('[OPTION]... [FILE [REV]...]'),
443 _('[OPTION]... [FILE [REV]...]'),
443 optionalrepo=True)
444 optionalrepo=True)
444 def debugdag(ui, repo, file_=None, *revs, **opts):
445 def debugdag(ui, repo, file_=None, *revs, **opts):
445 """format the changelog or an index DAG as a concise textual description
446 """format the changelog or an index DAG as a concise textual description
446
447
447 If you pass a revlog index, the revlog's DAG is emitted. If you list
448 If you pass a revlog index, the revlog's DAG is emitted. If you list
448 revision numbers, they get labeled in the output as rN.
449 revision numbers, they get labeled in the output as rN.
449
450
450 Otherwise, the changelog DAG of the current repo is emitted.
451 Otherwise, the changelog DAG of the current repo is emitted.
451 """
452 """
452 spaces = opts.get('spaces')
453 spaces = opts.get('spaces')
453 dots = opts.get('dots')
454 dots = opts.get('dots')
454 if file_:
455 if file_:
455 rlog = revlog.revlog(scmutil.vfs(pycompat.getcwd(), audit=False),
456 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
456 file_)
457 file_)
457 revs = set((int(r) for r in revs))
458 revs = set((int(r) for r in revs))
458 def events():
459 def events():
459 for r in rlog:
460 for r in rlog:
460 yield 'n', (r, list(p for p in rlog.parentrevs(r)
461 yield 'n', (r, list(p for p in rlog.parentrevs(r)
461 if p != -1))
462 if p != -1))
462 if r in revs:
463 if r in revs:
463 yield 'l', (r, "r%i" % r)
464 yield 'l', (r, "r%i" % r)
464 elif repo:
465 elif repo:
465 cl = repo.changelog
466 cl = repo.changelog
466 tags = opts.get('tags')
467 tags = opts.get('tags')
467 branches = opts.get('branches')
468 branches = opts.get('branches')
468 if tags:
469 if tags:
469 labels = {}
470 labels = {}
470 for l, n in repo.tags().items():
471 for l, n in repo.tags().items():
471 labels.setdefault(cl.rev(n), []).append(l)
472 labels.setdefault(cl.rev(n), []).append(l)
472 def events():
473 def events():
473 b = "default"
474 b = "default"
474 for r in cl:
475 for r in cl:
475 if branches:
476 if branches:
476 newb = cl.read(cl.node(r))[5]['branch']
477 newb = cl.read(cl.node(r))[5]['branch']
477 if newb != b:
478 if newb != b:
478 yield 'a', newb
479 yield 'a', newb
479 b = newb
480 b = newb
480 yield 'n', (r, list(p for p in cl.parentrevs(r)
481 yield 'n', (r, list(p for p in cl.parentrevs(r)
481 if p != -1))
482 if p != -1))
482 if tags:
483 if tags:
483 ls = labels.get(r)
484 ls = labels.get(r)
484 if ls:
485 if ls:
485 for l in ls:
486 for l in ls:
486 yield 'l', (r, l)
487 yield 'l', (r, l)
487 else:
488 else:
488 raise error.Abort(_('need repo for changelog dag'))
489 raise error.Abort(_('need repo for changelog dag'))
489
490
490 for line in dagparser.dagtextlines(events(),
491 for line in dagparser.dagtextlines(events(),
491 addspaces=spaces,
492 addspaces=spaces,
492 wraplabels=True,
493 wraplabels=True,
493 wrapannotations=True,
494 wrapannotations=True,
494 wrapnonlinear=dots,
495 wrapnonlinear=dots,
495 usedots=dots,
496 usedots=dots,
496 maxlinewidth=70):
497 maxlinewidth=70):
497 ui.write(line)
498 ui.write(line)
498 ui.write("\n")
499 ui.write("\n")
499
500
500 @command('debugdata', commands.debugrevlogopts, _('-c|-m|FILE REV'))
501 @command('debugdata', commands.debugrevlogopts, _('-c|-m|FILE REV'))
501 def debugdata(ui, repo, file_, rev=None, **opts):
502 def debugdata(ui, repo, file_, rev=None, **opts):
502 """dump the contents of a data file revision"""
503 """dump the contents of a data file revision"""
503 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
504 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
504 if rev is not None:
505 if rev is not None:
505 raise error.CommandError('debugdata', _('invalid arguments'))
506 raise error.CommandError('debugdata', _('invalid arguments'))
506 file_, rev = None, file_
507 file_, rev = None, file_
507 elif rev is None:
508 elif rev is None:
508 raise error.CommandError('debugdata', _('invalid arguments'))
509 raise error.CommandError('debugdata', _('invalid arguments'))
509 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
510 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
510 try:
511 try:
511 ui.write(r.revision(r.lookup(rev), raw=True))
512 ui.write(r.revision(r.lookup(rev), raw=True))
512 except KeyError:
513 except KeyError:
513 raise error.Abort(_('invalid revision identifier %s') % rev)
514 raise error.Abort(_('invalid revision identifier %s') % rev)
514
515
515 @command('debugdate',
516 @command('debugdate',
516 [('e', 'extended', None, _('try extended date formats'))],
517 [('e', 'extended', None, _('try extended date formats'))],
517 _('[-e] DATE [RANGE]'),
518 _('[-e] DATE [RANGE]'),
518 norepo=True, optionalrepo=True)
519 norepo=True, optionalrepo=True)
519 def debugdate(ui, date, range=None, **opts):
520 def debugdate(ui, date, range=None, **opts):
520 """parse and display a date"""
521 """parse and display a date"""
521 if opts["extended"]:
522 if opts["extended"]:
522 d = util.parsedate(date, util.extendeddateformats)
523 d = util.parsedate(date, util.extendeddateformats)
523 else:
524 else:
524 d = util.parsedate(date)
525 d = util.parsedate(date)
525 ui.write(("internal: %s %s\n") % d)
526 ui.write(("internal: %s %s\n") % d)
526 ui.write(("standard: %s\n") % util.datestr(d))
527 ui.write(("standard: %s\n") % util.datestr(d))
527 if range:
528 if range:
528 m = util.matchdate(range)
529 m = util.matchdate(range)
529 ui.write(("match: %s\n") % m(d[0]))
530 ui.write(("match: %s\n") % m(d[0]))
530
531
531 @command('debugdeltachain',
532 @command('debugdeltachain',
532 commands.debugrevlogopts + commands.formatteropts,
533 commands.debugrevlogopts + commands.formatteropts,
533 _('-c|-m|FILE'),
534 _('-c|-m|FILE'),
534 optionalrepo=True)
535 optionalrepo=True)
535 def debugdeltachain(ui, repo, file_=None, **opts):
536 def debugdeltachain(ui, repo, file_=None, **opts):
536 """dump information about delta chains in a revlog
537 """dump information about delta chains in a revlog
537
538
538 Output can be templatized. Available template keywords are:
539 Output can be templatized. Available template keywords are:
539
540
540 :``rev``: revision number
541 :``rev``: revision number
541 :``chainid``: delta chain identifier (numbered by unique base)
542 :``chainid``: delta chain identifier (numbered by unique base)
542 :``chainlen``: delta chain length to this revision
543 :``chainlen``: delta chain length to this revision
543 :``prevrev``: previous revision in delta chain
544 :``prevrev``: previous revision in delta chain
544 :``deltatype``: role of delta / how it was computed
545 :``deltatype``: role of delta / how it was computed
545 :``compsize``: compressed size of revision
546 :``compsize``: compressed size of revision
546 :``uncompsize``: uncompressed size of revision
547 :``uncompsize``: uncompressed size of revision
547 :``chainsize``: total size of compressed revisions in chain
548 :``chainsize``: total size of compressed revisions in chain
548 :``chainratio``: total chain size divided by uncompressed revision size
549 :``chainratio``: total chain size divided by uncompressed revision size
549 (new delta chains typically start at ratio 2.00)
550 (new delta chains typically start at ratio 2.00)
550 :``lindist``: linear distance from base revision in delta chain to end
551 :``lindist``: linear distance from base revision in delta chain to end
551 of this revision
552 of this revision
552 :``extradist``: total size of revisions not part of this delta chain from
553 :``extradist``: total size of revisions not part of this delta chain from
553 base of delta chain to end of this revision; a measurement
554 base of delta chain to end of this revision; a measurement
554 of how much extra data we need to read/seek across to read
555 of how much extra data we need to read/seek across to read
555 the delta chain for this revision
556 the delta chain for this revision
556 :``extraratio``: extradist divided by chainsize; another representation of
557 :``extraratio``: extradist divided by chainsize; another representation of
557 how much unrelated data is needed to load this delta chain
558 how much unrelated data is needed to load this delta chain
558 """
559 """
559 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
560 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
560 index = r.index
561 index = r.index
561 generaldelta = r.version & revlog.REVLOGGENERALDELTA
562 generaldelta = r.version & revlog.REVLOGGENERALDELTA
562
563
563 def revinfo(rev):
564 def revinfo(rev):
564 e = index[rev]
565 e = index[rev]
565 compsize = e[1]
566 compsize = e[1]
566 uncompsize = e[2]
567 uncompsize = e[2]
567 chainsize = 0
568 chainsize = 0
568
569
569 if generaldelta:
570 if generaldelta:
570 if e[3] == e[5]:
571 if e[3] == e[5]:
571 deltatype = 'p1'
572 deltatype = 'p1'
572 elif e[3] == e[6]:
573 elif e[3] == e[6]:
573 deltatype = 'p2'
574 deltatype = 'p2'
574 elif e[3] == rev - 1:
575 elif e[3] == rev - 1:
575 deltatype = 'prev'
576 deltatype = 'prev'
576 elif e[3] == rev:
577 elif e[3] == rev:
577 deltatype = 'base'
578 deltatype = 'base'
578 else:
579 else:
579 deltatype = 'other'
580 deltatype = 'other'
580 else:
581 else:
581 if e[3] == rev:
582 if e[3] == rev:
582 deltatype = 'base'
583 deltatype = 'base'
583 else:
584 else:
584 deltatype = 'prev'
585 deltatype = 'prev'
585
586
586 chain = r._deltachain(rev)[0]
587 chain = r._deltachain(rev)[0]
587 for iterrev in chain:
588 for iterrev in chain:
588 e = index[iterrev]
589 e = index[iterrev]
589 chainsize += e[1]
590 chainsize += e[1]
590
591
591 return compsize, uncompsize, deltatype, chain, chainsize
592 return compsize, uncompsize, deltatype, chain, chainsize
592
593
593 fm = ui.formatter('debugdeltachain', opts)
594 fm = ui.formatter('debugdeltachain', opts)
594
595
595 fm.plain(' rev chain# chainlen prev delta '
596 fm.plain(' rev chain# chainlen prev delta '
596 'size rawsize chainsize ratio lindist extradist '
597 'size rawsize chainsize ratio lindist extradist '
597 'extraratio\n')
598 'extraratio\n')
598
599
599 chainbases = {}
600 chainbases = {}
600 for rev in r:
601 for rev in r:
601 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
602 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
602 chainbase = chain[0]
603 chainbase = chain[0]
603 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
604 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
604 basestart = r.start(chainbase)
605 basestart = r.start(chainbase)
605 revstart = r.start(rev)
606 revstart = r.start(rev)
606 lineardist = revstart + comp - basestart
607 lineardist = revstart + comp - basestart
607 extradist = lineardist - chainsize
608 extradist = lineardist - chainsize
608 try:
609 try:
609 prevrev = chain[-2]
610 prevrev = chain[-2]
610 except IndexError:
611 except IndexError:
611 prevrev = -1
612 prevrev = -1
612
613
613 chainratio = float(chainsize) / float(uncomp)
614 chainratio = float(chainsize) / float(uncomp)
614 extraratio = float(extradist) / float(chainsize)
615 extraratio = float(extradist) / float(chainsize)
615
616
616 fm.startitem()
617 fm.startitem()
617 fm.write('rev chainid chainlen prevrev deltatype compsize '
618 fm.write('rev chainid chainlen prevrev deltatype compsize '
618 'uncompsize chainsize chainratio lindist extradist '
619 'uncompsize chainsize chainratio lindist extradist '
619 'extraratio',
620 'extraratio',
620 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f\n',
621 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f\n',
621 rev, chainid, len(chain), prevrev, deltatype, comp,
622 rev, chainid, len(chain), prevrev, deltatype, comp,
622 uncomp, chainsize, chainratio, lineardist, extradist,
623 uncomp, chainsize, chainratio, lineardist, extradist,
623 extraratio,
624 extraratio,
624 rev=rev, chainid=chainid, chainlen=len(chain),
625 rev=rev, chainid=chainid, chainlen=len(chain),
625 prevrev=prevrev, deltatype=deltatype, compsize=comp,
626 prevrev=prevrev, deltatype=deltatype, compsize=comp,
626 uncompsize=uncomp, chainsize=chainsize,
627 uncompsize=uncomp, chainsize=chainsize,
627 chainratio=chainratio, lindist=lineardist,
628 chainratio=chainratio, lindist=lineardist,
628 extradist=extradist, extraratio=extraratio)
629 extradist=extradist, extraratio=extraratio)
629
630
630 fm.end()
631 fm.end()
631
632
632 @command('debugdirstate|debugstate',
633 @command('debugdirstate|debugstate',
633 [('', 'nodates', None, _('do not display the saved mtime')),
634 [('', 'nodates', None, _('do not display the saved mtime')),
634 ('', 'datesort', None, _('sort by saved mtime'))],
635 ('', 'datesort', None, _('sort by saved mtime'))],
635 _('[OPTION]...'))
636 _('[OPTION]...'))
636 def debugstate(ui, repo, **opts):
637 def debugstate(ui, repo, **opts):
637 """show the contents of the current dirstate"""
638 """show the contents of the current dirstate"""
638
639
639 nodates = opts.get('nodates')
640 nodates = opts.get('nodates')
640 datesort = opts.get('datesort')
641 datesort = opts.get('datesort')
641
642
642 timestr = ""
643 timestr = ""
643 if datesort:
644 if datesort:
644 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
645 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
645 else:
646 else:
646 keyfunc = None # sort by filename
647 keyfunc = None # sort by filename
647 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
648 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
648 if ent[3] == -1:
649 if ent[3] == -1:
649 timestr = 'unset '
650 timestr = 'unset '
650 elif nodates:
651 elif nodates:
651 timestr = 'set '
652 timestr = 'set '
652 else:
653 else:
653 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
654 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
654 time.localtime(ent[3]))
655 time.localtime(ent[3]))
655 if ent[1] & 0o20000:
656 if ent[1] & 0o20000:
656 mode = 'lnk'
657 mode = 'lnk'
657 else:
658 else:
658 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
659 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
659 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
660 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
660 for f in repo.dirstate.copies():
661 for f in repo.dirstate.copies():
661 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
662 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
662
663
663 @command('debugdiscovery',
664 @command('debugdiscovery',
664 [('', 'old', None, _('use old-style discovery')),
665 [('', 'old', None, _('use old-style discovery')),
665 ('', 'nonheads', None,
666 ('', 'nonheads', None,
666 _('use old-style discovery with non-heads included')),
667 _('use old-style discovery with non-heads included')),
667 ] + commands.remoteopts,
668 ] + commands.remoteopts,
668 _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
669 _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
669 def debugdiscovery(ui, repo, remoteurl="default", **opts):
670 def debugdiscovery(ui, repo, remoteurl="default", **opts):
670 """runs the changeset discovery protocol in isolation"""
671 """runs the changeset discovery protocol in isolation"""
671 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
672 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
672 opts.get('branch'))
673 opts.get('branch'))
673 remote = hg.peer(repo, opts, remoteurl)
674 remote = hg.peer(repo, opts, remoteurl)
674 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
675 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
675
676
676 # make sure tests are repeatable
677 # make sure tests are repeatable
677 random.seed(12323)
678 random.seed(12323)
678
679
679 def doit(localheads, remoteheads, remote=remote):
680 def doit(localheads, remoteheads, remote=remote):
680 if opts.get('old'):
681 if opts.get('old'):
681 if localheads:
682 if localheads:
682 raise error.Abort('cannot use localheads with old style '
683 raise error.Abort('cannot use localheads with old style '
683 'discovery')
684 'discovery')
684 if not util.safehasattr(remote, 'branches'):
685 if not util.safehasattr(remote, 'branches'):
685 # enable in-client legacy support
686 # enable in-client legacy support
686 remote = localrepo.locallegacypeer(remote.local())
687 remote = localrepo.locallegacypeer(remote.local())
687 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
688 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
688 force=True)
689 force=True)
689 common = set(common)
690 common = set(common)
690 if not opts.get('nonheads'):
691 if not opts.get('nonheads'):
691 ui.write(("unpruned common: %s\n") %
692 ui.write(("unpruned common: %s\n") %
692 " ".join(sorted(short(n) for n in common)))
693 " ".join(sorted(short(n) for n in common)))
693 dag = dagutil.revlogdag(repo.changelog)
694 dag = dagutil.revlogdag(repo.changelog)
694 all = dag.ancestorset(dag.internalizeall(common))
695 all = dag.ancestorset(dag.internalizeall(common))
695 common = dag.externalizeall(dag.headsetofconnecteds(all))
696 common = dag.externalizeall(dag.headsetofconnecteds(all))
696 else:
697 else:
697 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
698 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
698 common = set(common)
699 common = set(common)
699 rheads = set(hds)
700 rheads = set(hds)
700 lheads = set(repo.heads())
701 lheads = set(repo.heads())
701 ui.write(("common heads: %s\n") %
702 ui.write(("common heads: %s\n") %
702 " ".join(sorted(short(n) for n in common)))
703 " ".join(sorted(short(n) for n in common)))
703 if lheads <= common:
704 if lheads <= common:
704 ui.write(("local is subset\n"))
705 ui.write(("local is subset\n"))
705 elif rheads <= common:
706 elif rheads <= common:
706 ui.write(("remote is subset\n"))
707 ui.write(("remote is subset\n"))
707
708
708 serverlogs = opts.get('serverlog')
709 serverlogs = opts.get('serverlog')
709 if serverlogs:
710 if serverlogs:
710 for filename in serverlogs:
711 for filename in serverlogs:
711 with open(filename, 'r') as logfile:
712 with open(filename, 'r') as logfile:
712 line = logfile.readline()
713 line = logfile.readline()
713 while line:
714 while line:
714 parts = line.strip().split(';')
715 parts = line.strip().split(';')
715 op = parts[1]
716 op = parts[1]
716 if op == 'cg':
717 if op == 'cg':
717 pass
718 pass
718 elif op == 'cgss':
719 elif op == 'cgss':
719 doit(parts[2].split(' '), parts[3].split(' '))
720 doit(parts[2].split(' '), parts[3].split(' '))
720 elif op == 'unb':
721 elif op == 'unb':
721 doit(parts[3].split(' '), parts[2].split(' '))
722 doit(parts[3].split(' '), parts[2].split(' '))
722 line = logfile.readline()
723 line = logfile.readline()
723 else:
724 else:
724 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
725 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
725 opts.get('remote_head'))
726 opts.get('remote_head'))
726 localrevs = opts.get('local_head')
727 localrevs = opts.get('local_head')
727 doit(localrevs, remoterevs)
728 doit(localrevs, remoterevs)
728
729
729 @command('debugextensions', commands.formatteropts, [], norepo=True)
730 @command('debugextensions', commands.formatteropts, [], norepo=True)
730 def debugextensions(ui, **opts):
731 def debugextensions(ui, **opts):
731 '''show information about active extensions'''
732 '''show information about active extensions'''
732 exts = extensions.extensions(ui)
733 exts = extensions.extensions(ui)
733 hgver = util.version()
734 hgver = util.version()
734 fm = ui.formatter('debugextensions', opts)
735 fm = ui.formatter('debugextensions', opts)
735 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
736 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
736 isinternal = extensions.ismoduleinternal(extmod)
737 isinternal = extensions.ismoduleinternal(extmod)
737 extsource = pycompat.fsencode(extmod.__file__)
738 extsource = pycompat.fsencode(extmod.__file__)
738 if isinternal:
739 if isinternal:
739 exttestedwith = [] # never expose magic string to users
740 exttestedwith = [] # never expose magic string to users
740 else:
741 else:
741 exttestedwith = getattr(extmod, 'testedwith', '').split()
742 exttestedwith = getattr(extmod, 'testedwith', '').split()
742 extbuglink = getattr(extmod, 'buglink', None)
743 extbuglink = getattr(extmod, 'buglink', None)
743
744
744 fm.startitem()
745 fm.startitem()
745
746
746 if ui.quiet or ui.verbose:
747 if ui.quiet or ui.verbose:
747 fm.write('name', '%s\n', extname)
748 fm.write('name', '%s\n', extname)
748 else:
749 else:
749 fm.write('name', '%s', extname)
750 fm.write('name', '%s', extname)
750 if isinternal or hgver in exttestedwith:
751 if isinternal or hgver in exttestedwith:
751 fm.plain('\n')
752 fm.plain('\n')
752 elif not exttestedwith:
753 elif not exttestedwith:
753 fm.plain(_(' (untested!)\n'))
754 fm.plain(_(' (untested!)\n'))
754 else:
755 else:
755 lasttestedversion = exttestedwith[-1]
756 lasttestedversion = exttestedwith[-1]
756 fm.plain(' (%s!)\n' % lasttestedversion)
757 fm.plain(' (%s!)\n' % lasttestedversion)
757
758
758 fm.condwrite(ui.verbose and extsource, 'source',
759 fm.condwrite(ui.verbose and extsource, 'source',
759 _(' location: %s\n'), extsource or "")
760 _(' location: %s\n'), extsource or "")
760
761
761 if ui.verbose:
762 if ui.verbose:
762 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
763 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
763 fm.data(bundled=isinternal)
764 fm.data(bundled=isinternal)
764
765
765 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
766 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
766 _(' tested with: %s\n'),
767 _(' tested with: %s\n'),
767 fm.formatlist(exttestedwith, name='ver'))
768 fm.formatlist(exttestedwith, name='ver'))
768
769
769 fm.condwrite(ui.verbose and extbuglink, 'buglink',
770 fm.condwrite(ui.verbose and extbuglink, 'buglink',
770 _(' bug reporting: %s\n'), extbuglink or "")
771 _(' bug reporting: %s\n'), extbuglink or "")
771
772
772 fm.end()
773 fm.end()
773
774
774 @command('debugfileset',
775 @command('debugfileset',
775 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
776 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
776 _('[-r REV] FILESPEC'))
777 _('[-r REV] FILESPEC'))
777 def debugfileset(ui, repo, expr, **opts):
778 def debugfileset(ui, repo, expr, **opts):
778 '''parse and apply a fileset specification'''
779 '''parse and apply a fileset specification'''
779 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
780 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
780 if ui.verbose:
781 if ui.verbose:
781 tree = fileset.parse(expr)
782 tree = fileset.parse(expr)
782 ui.note(fileset.prettyformat(tree), "\n")
783 ui.note(fileset.prettyformat(tree), "\n")
783
784
784 for f in ctx.getfileset(expr):
785 for f in ctx.getfileset(expr):
785 ui.write("%s\n" % f)
786 ui.write("%s\n" % f)
786
787
787 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
788 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
788 def debugfsinfo(ui, path="."):
789 def debugfsinfo(ui, path="."):
789 """show information detected about current filesystem"""
790 """show information detected about current filesystem"""
790 util.writefile('.debugfsinfo', '')
791 util.writefile('.debugfsinfo', '')
791 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
792 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
792 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
793 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
793 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
794 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
794 ui.write(('case-sensitive: %s\n') % (util.fscasesensitive('.debugfsinfo')
795 ui.write(('case-sensitive: %s\n') % (util.fscasesensitive('.debugfsinfo')
795 and 'yes' or 'no'))
796 and 'yes' or 'no'))
796 os.unlink('.debugfsinfo')
797 os.unlink('.debugfsinfo')
797
798
798 @command('debuggetbundle',
799 @command('debuggetbundle',
799 [('H', 'head', [], _('id of head node'), _('ID')),
800 [('H', 'head', [], _('id of head node'), _('ID')),
800 ('C', 'common', [], _('id of common node'), _('ID')),
801 ('C', 'common', [], _('id of common node'), _('ID')),
801 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
802 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
802 _('REPO FILE [-H|-C ID]...'),
803 _('REPO FILE [-H|-C ID]...'),
803 norepo=True)
804 norepo=True)
804 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
805 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
805 """retrieves a bundle from a repo
806 """retrieves a bundle from a repo
806
807
807 Every ID must be a full-length hex node id string. Saves the bundle to the
808 Every ID must be a full-length hex node id string. Saves the bundle to the
808 given file.
809 given file.
809 """
810 """
810 repo = hg.peer(ui, opts, repopath)
811 repo = hg.peer(ui, opts, repopath)
811 if not repo.capable('getbundle'):
812 if not repo.capable('getbundle'):
812 raise error.Abort("getbundle() not supported by target repository")
813 raise error.Abort("getbundle() not supported by target repository")
813 args = {}
814 args = {}
814 if common:
815 if common:
815 args['common'] = [bin(s) for s in common]
816 args['common'] = [bin(s) for s in common]
816 if head:
817 if head:
817 args['heads'] = [bin(s) for s in head]
818 args['heads'] = [bin(s) for s in head]
818 # TODO: get desired bundlecaps from command line.
819 # TODO: get desired bundlecaps from command line.
819 args['bundlecaps'] = None
820 args['bundlecaps'] = None
820 bundle = repo.getbundle('debug', **args)
821 bundle = repo.getbundle('debug', **args)
821
822
822 bundletype = opts.get('type', 'bzip2').lower()
823 bundletype = opts.get('type', 'bzip2').lower()
823 btypes = {'none': 'HG10UN',
824 btypes = {'none': 'HG10UN',
824 'bzip2': 'HG10BZ',
825 'bzip2': 'HG10BZ',
825 'gzip': 'HG10GZ',
826 'gzip': 'HG10GZ',
826 'bundle2': 'HG20'}
827 'bundle2': 'HG20'}
827 bundletype = btypes.get(bundletype)
828 bundletype = btypes.get(bundletype)
828 if bundletype not in bundle2.bundletypes:
829 if bundletype not in bundle2.bundletypes:
829 raise error.Abort(_('unknown bundle type specified with --type'))
830 raise error.Abort(_('unknown bundle type specified with --type'))
830 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
831 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
831
832
832 @command('debugignore', [], '[FILE]')
833 @command('debugignore', [], '[FILE]')
833 def debugignore(ui, repo, *files, **opts):
834 def debugignore(ui, repo, *files, **opts):
834 """display the combined ignore pattern and information about ignored files
835 """display the combined ignore pattern and information about ignored files
835
836
836 With no argument display the combined ignore pattern.
837 With no argument display the combined ignore pattern.
837
838
838 Given space separated file names, shows if the given file is ignored and
839 Given space separated file names, shows if the given file is ignored and
839 if so, show the ignore rule (file and line number) that matched it.
840 if so, show the ignore rule (file and line number) that matched it.
840 """
841 """
841 ignore = repo.dirstate._ignore
842 ignore = repo.dirstate._ignore
842 if not files:
843 if not files:
843 # Show all the patterns
844 # Show all the patterns
844 includepat = getattr(ignore, 'includepat', None)
845 includepat = getattr(ignore, 'includepat', None)
845 if includepat is not None:
846 if includepat is not None:
846 ui.write("%s\n" % includepat)
847 ui.write("%s\n" % includepat)
847 else:
848 else:
848 raise error.Abort(_("no ignore patterns found"))
849 raise error.Abort(_("no ignore patterns found"))
849 else:
850 else:
850 for f in files:
851 for f in files:
851 nf = util.normpath(f)
852 nf = util.normpath(f)
852 ignored = None
853 ignored = None
853 ignoredata = None
854 ignoredata = None
854 if nf != '.':
855 if nf != '.':
855 if ignore(nf):
856 if ignore(nf):
856 ignored = nf
857 ignored = nf
857 ignoredata = repo.dirstate._ignorefileandline(nf)
858 ignoredata = repo.dirstate._ignorefileandline(nf)
858 else:
859 else:
859 for p in util.finddirs(nf):
860 for p in util.finddirs(nf):
860 if ignore(p):
861 if ignore(p):
861 ignored = p
862 ignored = p
862 ignoredata = repo.dirstate._ignorefileandline(p)
863 ignoredata = repo.dirstate._ignorefileandline(p)
863 break
864 break
864 if ignored:
865 if ignored:
865 if ignored == nf:
866 if ignored == nf:
866 ui.write(_("%s is ignored\n") % f)
867 ui.write(_("%s is ignored\n") % f)
867 else:
868 else:
868 ui.write(_("%s is ignored because of "
869 ui.write(_("%s is ignored because of "
869 "containing folder %s\n")
870 "containing folder %s\n")
870 % (f, ignored))
871 % (f, ignored))
871 ignorefile, lineno, line = ignoredata
872 ignorefile, lineno, line = ignoredata
872 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
873 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
873 % (ignorefile, lineno, line))
874 % (ignorefile, lineno, line))
874 else:
875 else:
875 ui.write(_("%s is not ignored\n") % f)
876 ui.write(_("%s is not ignored\n") % f)
876
877
877 @command('debugindex', commands.debugrevlogopts +
878 @command('debugindex', commands.debugrevlogopts +
878 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
879 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
879 _('[-f FORMAT] -c|-m|FILE'),
880 _('[-f FORMAT] -c|-m|FILE'),
880 optionalrepo=True)
881 optionalrepo=True)
881 def debugindex(ui, repo, file_=None, **opts):
882 def debugindex(ui, repo, file_=None, **opts):
882 """dump the contents of an index file"""
883 """dump the contents of an index file"""
883 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
884 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
884 format = opts.get('format', 0)
885 format = opts.get('format', 0)
885 if format not in (0, 1):
886 if format not in (0, 1):
886 raise error.Abort(_("unknown format %d") % format)
887 raise error.Abort(_("unknown format %d") % format)
887
888
888 generaldelta = r.version & revlog.REVLOGGENERALDELTA
889 generaldelta = r.version & revlog.REVLOGGENERALDELTA
889 if generaldelta:
890 if generaldelta:
890 basehdr = ' delta'
891 basehdr = ' delta'
891 else:
892 else:
892 basehdr = ' base'
893 basehdr = ' base'
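# with generaldelta the column shows each revision's delta parent; otherwise
# it shows the base revision of the delta chain (matching the loop below)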
893
894
894 if ui.debugflag:
895 if ui.debugflag:
895 shortfn = hex
896 shortfn = hex
896 else:
897 else:
897 shortfn = short
898 shortfn = short
898
899
899 # There might not be anything in r, so have a sane default
900 # There might not be anything in r, so have a sane default
900 idlen = 12
901 idlen = 12
901 for i in r:
902 for i in r:
902 idlen = len(shortfn(r.node(i)))
903 idlen = len(shortfn(r.node(i)))
903 break
904 break
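# all node ids render at the same width, so measuring the first one is enough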
904
905
905 if format == 0:
906 if format == 0:
906 ui.write((" rev offset length " + basehdr + " linkrev"
907 ui.write((" rev offset length " + basehdr + " linkrev"
907 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
908 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
908 elif format == 1:
909 elif format == 1:
909 ui.write((" rev flag offset length"
910 ui.write((" rev flag offset length"
910 " size " + basehdr + " link p1 p2"
911 " size " + basehdr + " link p1 p2"
911 " %s\n") % "nodeid".rjust(idlen))
912 " %s\n") % "nodeid".rjust(idlen))
912
913
913 for i in r:
914 for i in r:
914 node = r.node(i)
915 node = r.node(i)
915 if generaldelta:
916 if generaldelta:
916 base = r.deltaparent(i)
917 base = r.deltaparent(i)
917 else:
918 else:
918 base = r.chainbase(i)
919 base = r.chainbase(i)
919 if format == 0:
920 if format == 0:
920 try:
921 try:
921 pp = r.parents(node)
922 pp = r.parents(node)
922 except Exception:
923 except Exception:
923 pp = [nullid, nullid]
924 pp = [nullid, nullid]
924 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
925 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
925 i, r.start(i), r.length(i), base, r.linkrev(i),
926 i, r.start(i), r.length(i), base, r.linkrev(i),
926 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
927 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
927 elif format == 1:
928 elif format == 1:
928 pr = r.parentrevs(i)
929 pr = r.parentrevs(i)
929 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
930 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
930 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
931 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
931 base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
932 base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
932
933
933 @command('debugindexdot', commands.debugrevlogopts,
934 @command('debugindexdot', commands.debugrevlogopts,
934 _('-c|-m|FILE'), optionalrepo=True)
935 _('-c|-m|FILE'), optionalrepo=True)
935 def debugindexdot(ui, repo, file_=None, **opts):
936 def debugindexdot(ui, repo, file_=None, **opts):
936 """dump an index DAG as a graphviz dot file"""
937 """dump an index DAG as a graphviz dot file"""
937 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
938 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
938 ui.write(("digraph G {\n"))
939 ui.write(("digraph G {\n"))
939 for i in r:
940 for i in r:
940 node = r.node(i)
941 node = r.node(i)
941 pp = r.parents(node)
942 pp = r.parents(node)
942 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
943 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
943 if pp[1] != nullid:
944 if pp[1] != nullid:
944 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
945 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
945 ui.write("}\n")
946 ui.write("}\n")
946
947
947 @command('debuginstall', [] + commands.formatteropts, '', norepo=True)
948 @command('debuginstall', [] + commands.formatteropts, '', norepo=True)
948 def debuginstall(ui, **opts):
949 def debuginstall(ui, **opts):
949 '''test Mercurial installation
950 '''test Mercurial installation
950
951
951 Returns 0 on success.
952 Returns 0 on success.
952 '''
953 '''
953
954
954 def writetemp(contents):
955 def writetemp(contents):
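# helper: write "contents" to a fresh temporary file and return its path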
955 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
956 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
956 f = os.fdopen(fd, pycompat.sysstr("wb"))
957 f = os.fdopen(fd, pycompat.sysstr("wb"))
957 f.write(contents)
958 f.write(contents)
958 f.close()
959 f.close()
959 return name
960 return name
960
961
961 problems = 0
962 problems = 0
962
963
963 fm = ui.formatter('debuginstall', opts)
964 fm = ui.formatter('debuginstall', opts)
964 fm.startitem()
965 fm.startitem()
965
966
966 # encoding
967 # encoding
967 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
968 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
968 err = None
969 err = None
969 try:
970 try:
970 encoding.fromlocal("test")
971 encoding.fromlocal("test")
971 except error.Abort as inst:
972 except error.Abort as inst:
972 err = inst
973 err = inst
973 problems += 1
974 problems += 1
974 fm.condwrite(err, 'encodingerror', _(" %s\n"
975 fm.condwrite(err, 'encodingerror', _(" %s\n"
975 " (check that your locale is properly set)\n"), err)
976 " (check that your locale is properly set)\n"), err)
976
977
977 # Python
978 # Python
978 fm.write('pythonexe', _("checking Python executable (%s)\n"),
979 fm.write('pythonexe', _("checking Python executable (%s)\n"),
979 pycompat.sysexecutable)
980 pycompat.sysexecutable)
980 fm.write('pythonver', _("checking Python version (%s)\n"),
981 fm.write('pythonver', _("checking Python version (%s)\n"),
981 ("%d.%d.%d" % sys.version_info[:3]))
982 ("%d.%d.%d" % sys.version_info[:3]))
982 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
983 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
983 os.path.dirname(pycompat.fsencode(os.__file__)))
984 os.path.dirname(pycompat.fsencode(os.__file__)))
984
985
985 security = set(sslutil.supportedprotocols)
986 security = set(sslutil.supportedprotocols)
986 if sslutil.hassni:
987 if sslutil.hassni:
987 security.add('sni')
988 security.add('sni')
988
989
989 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
990 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
990 fm.formatlist(sorted(security), name='protocol',
991 fm.formatlist(sorted(security), name='protocol',
991 fmt='%s', sep=','))
992 fmt='%s', sep=','))
992
993
993 # These are warnings, not errors. So don't increment problem count. This
994 # These are warnings, not errors. So don't increment problem count. This
994 # may change in the future.
995 # may change in the future.
995 if 'tls1.2' not in security:
996 if 'tls1.2' not in security:
996 fm.plain(_(' TLS 1.2 not supported by Python install; '
997 fm.plain(_(' TLS 1.2 not supported by Python install; '
997 'network connections lack modern security\n'))
998 'network connections lack modern security\n'))
998 if 'sni' not in security:
999 if 'sni' not in security:
999 fm.plain(_(' SNI not supported by Python install; may have '
1000 fm.plain(_(' SNI not supported by Python install; may have '
1000 'connectivity issues with some servers\n'))
1001 'connectivity issues with some servers\n'))
1001
1002
1002 # TODO print CA cert info
1003 # TODO print CA cert info
1003
1004
1004 # hg version
1005 # hg version
1005 hgver = util.version()
1006 hgver = util.version()
1006 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1007 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1007 hgver.split('+')[0])
1008 hgver.split('+')[0])
1008 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1009 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1009 '+'.join(hgver.split('+')[1:]))
1010 '+'.join(hgver.split('+')[1:]))
1010
1011
1011 # compiled modules
1012 # compiled modules
1012 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1013 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1013 policy.policy)
1014 policy.policy)
1014 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1015 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1015 os.path.dirname(pycompat.fsencode(__file__)))
1016 os.path.dirname(pycompat.fsencode(__file__)))
1016
1017
1017 err = None
1018 err = None
1018 try:
1019 try:
1019 from . import (
1020 from . import (
1020 base85,
1021 base85,
1021 bdiff,
1022 bdiff,
1022 mpatch,
1023 mpatch,
1023 osutil,
1024 osutil,
1024 )
1025 )
1025 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1026 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1026 except Exception as inst:
1027 except Exception as inst:
1027 err = inst
1028 err = inst
1028 problems += 1
1029 problems += 1
1029 fm.condwrite(err, 'extensionserror', " %s\n", err)
1030 fm.condwrite(err, 'extensionserror', " %s\n", err)
1030
1031
1031 compengines = util.compengines._engines.values()
1032 compengines = util.compengines._engines.values()
1032 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1033 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1033 fm.formatlist(sorted(e.name() for e in compengines),
1034 fm.formatlist(sorted(e.name() for e in compengines),
1034 name='compengine', fmt='%s', sep=', '))
1035 name='compengine', fmt='%s', sep=', '))
1035 fm.write('compenginesavail', _('checking available compression engines '
1036 fm.write('compenginesavail', _('checking available compression engines '
1036 '(%s)\n'),
1037 '(%s)\n'),
1037 fm.formatlist(sorted(e.name() for e in compengines
1038 fm.formatlist(sorted(e.name() for e in compengines
1038 if e.available()),
1039 if e.available()),
1039 name='compengine', fmt='%s', sep=', '))
1040 name='compengine', fmt='%s', sep=', '))
1040 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1041 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1041 fm.write('compenginesserver', _('checking available compression engines '
1042 fm.write('compenginesserver', _('checking available compression engines '
1042 'for wire protocol (%s)\n'),
1043 'for wire protocol (%s)\n'),
1043 fm.formatlist([e.name() for e in wirecompengines
1044 fm.formatlist([e.name() for e in wirecompengines
1044 if e.wireprotosupport()],
1045 if e.wireprotosupport()],
1045 name='compengine', fmt='%s', sep=', '))
1046 name='compengine', fmt='%s', sep=', '))
1046
1047
1047 # templates
1048 # templates
1048 p = templater.templatepaths()
1049 p = templater.templatepaths()
1049 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1050 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1050 fm.condwrite(not p, '', _(" no template directories found\n"))
1051 fm.condwrite(not p, '', _(" no template directories found\n"))
1051 if p:
1052 if p:
1052 m = templater.templatepath("map-cmdline.default")
1053 m = templater.templatepath("map-cmdline.default")
1053 if m:
1054 if m:
1054 # template found, check if it is working
1055 # template found, check if it is working
1055 err = None
1056 err = None
1056 try:
1057 try:
1057 templater.templater.frommapfile(m)
1058 templater.templater.frommapfile(m)
1058 except Exception as inst:
1059 except Exception as inst:
1059 err = inst
1060 err = inst
1060 p = None
1061 p = None
1061 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1062 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1062 else:
1063 else:
1063 p = None
1064 p = None
1064 fm.condwrite(p, 'defaulttemplate',
1065 fm.condwrite(p, 'defaulttemplate',
1065 _("checking default template (%s)\n"), m)
1066 _("checking default template (%s)\n"), m)
1066 fm.condwrite(not m, 'defaulttemplatenotfound',
1067 fm.condwrite(not m, 'defaulttemplatenotfound',
1067 _(" template '%s' not found\n"), "default")
1068 _(" template '%s' not found\n"), "default")
1068 if not p:
1069 if not p:
1069 problems += 1
1070 problems += 1
1070 fm.condwrite(not p, '',
1071 fm.condwrite(not p, '',
1071 _(" (templates seem to have been installed incorrectly)\n"))
1072 _(" (templates seem to have been installed incorrectly)\n"))
1072
1073
1073 # editor
1074 # editor
1074 editor = ui.geteditor()
1075 editor = ui.geteditor()
1075 editor = util.expandpath(editor)
1076 editor = util.expandpath(editor)
1076 fm.write('editor', _("checking commit editor... (%s)\n"), editor)
1077 fm.write('editor', _("checking commit editor... (%s)\n"), editor)
1077 cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
1078 cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
1078 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1079 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1079 _(" No commit editor set and can't find %s in PATH\n"
1080 _(" No commit editor set and can't find %s in PATH\n"
1080 " (specify a commit editor in your configuration"
1081 " (specify a commit editor in your configuration"
1081 " file)\n"), not cmdpath and editor == 'vi' and editor)
1082 " file)\n"), not cmdpath and editor == 'vi' and editor)
1082 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1083 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1083 _(" Can't find editor '%s' in PATH\n"
1084 _(" Can't find editor '%s' in PATH\n"
1084 " (specify a commit editor in your configuration"
1085 " (specify a commit editor in your configuration"
1085 " file)\n"), not cmdpath and editor)
1086 " file)\n"), not cmdpath and editor)
1086 if not cmdpath and editor != 'vi':
1087 if not cmdpath and editor != 'vi':
1087 problems += 1
1088 problems += 1
1088
1089
1089 # check username
1090 # check username
1090 username = None
1091 username = None
1091 err = None
1092 err = None
1092 try:
1093 try:
1093 username = ui.username()
1094 username = ui.username()
1094 except error.Abort as e:
1095 except error.Abort as e:
1095 err = e
1096 err = e
1096 problems += 1
1097 problems += 1
1097
1098
1098 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1099 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1099 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1100 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1100 " (specify a username in your configuration file)\n"), err)
1101 " (specify a username in your configuration file)\n"), err)
1101
1102
1102 fm.condwrite(not problems, '',
1103 fm.condwrite(not problems, '',
1103 _("no problems detected\n"))
1104 _("no problems detected\n"))
1104 if not problems:
1105 if not problems:
1105 fm.data(problems=problems)
1106 fm.data(problems=problems)
1106 fm.condwrite(problems, 'problems',
1107 fm.condwrite(problems, 'problems',
1107 _("%d problems detected,"
1108 _("%d problems detected,"
1108 " please check your install!\n"), problems)
1109 " please check your install!\n"), problems)
1109 fm.end()
1110 fm.end()
1110
1111
1111 return problems
1112 return problems
1112
1113
1113 @command('debugknown', [], _('REPO ID...'), norepo=True)
1114 @command('debugknown', [], _('REPO ID...'), norepo=True)
1114 def debugknown(ui, repopath, *ids, **opts):
1115 def debugknown(ui, repopath, *ids, **opts):
1115 """test whether node ids are known to a repo
1116 """test whether node ids are known to a repo
1116
1117
1117 Every ID must be a full-length hex node id string. Returns a list of 0s
1118 Every ID must be a full-length hex node id string. Returns a list of 0s
1118 and 1s indicating unknown/known.
1119 and 1s indicating unknown/known.
1119 """
1120 """
1120 repo = hg.peer(ui, opts, repopath)
1121 repo = hg.peer(ui, opts, repopath)
1121 if not repo.capable('known'):
1122 if not repo.capable('known'):
1122 raise error.Abort("known() not supported by target repository")
1123 raise error.Abort("known() not supported by target repository")
1123 flags = repo.known([bin(s) for s in ids])
1124 flags = repo.known([bin(s) for s in ids])
1124 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1125 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1125
1126
1126 @command('debuglabelcomplete', [], _('LABEL...'))
1127 @command('debuglabelcomplete', [], _('LABEL...'))
1127 def debuglabelcomplete(ui, repo, *args):
1128 def debuglabelcomplete(ui, repo, *args):
1128 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1129 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1129 commands.debugnamecomplete(ui, repo, *args)
1130 commands.debugnamecomplete(ui, repo, *args)
1130
1131
1131 @command('debuglocks',
1132 @command('debuglocks',
1132 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1133 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1133 ('W', 'force-wlock', None,
1134 ('W', 'force-wlock', None,
1134 _('free the working state lock (DANGEROUS)'))],
1135 _('free the working state lock (DANGEROUS)'))],
1135 _('[OPTION]...'))
1136 _('[OPTION]...'))
1136 def debuglocks(ui, repo, **opts):
1137 def debuglocks(ui, repo, **opts):
1137 """show or modify state of locks
1138 """show or modify state of locks
1138
1139
1139 By default, this command will show which locks are held. This
1140 By default, this command will show which locks are held. This
1140 includes the user and process holding the lock, the amount of time
1141 includes the user and process holding the lock, the amount of time
1141 the lock has been held, and the machine name where the process is
1142 the lock has been held, and the machine name where the process is
1142 running if it's not local.
1143 running if it's not local.
1143
1144
1144 Locks protect the integrity of Mercurial's data, so they should be
1145 Locks protect the integrity of Mercurial's data, so they should be
1145 treated with care. System crashes or other interruptions may cause
1146 treated with care. System crashes or other interruptions may cause
1146 locks to not be properly released, though Mercurial will usually
1147 locks to not be properly released, though Mercurial will usually
1147 detect and remove such stale locks automatically.
1148 detect and remove such stale locks automatically.
1148
1149
1149 However, detecting stale locks may not always be possible (for
1150 However, detecting stale locks may not always be possible (for
1150 instance, on a shared filesystem). Removing locks may also be
1151 instance, on a shared filesystem). Removing locks may also be
1151 blocked by filesystem permissions.
1152 blocked by filesystem permissions.
1152
1153
1153 Returns 0 if no locks are held.
1154 Returns 0 if no locks are held.
1154
1155
1155 """
1156 """
1156
1157
1157 if opts.get('force_lock'):
1158 if opts.get('force_lock'):
1158 repo.svfs.unlink('lock')
1159 repo.svfs.unlink('lock')
1159 if opts.get('force_wlock'):
1160 if opts.get('force_wlock'):
1160 repo.vfs.unlink('wlock')
1161 repo.vfs.unlink('wlock')
1161 if opts.get('force_lock') or opts.get('force_wlock'):
1162 if opts.get('force_lock') or opts.get('force_wlock'):
1162 return 0
1163 return 0
1163
1164
1164 now = time.time()
1165 now = time.time()
1165 held = 0
1166 held = 0
1166
1167
1167 def report(vfs, name, method):
1168 def report(vfs, name, method):
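# returns 1 if the lock appears to be held by another process, 0 if it is free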
1168 # this causes stale locks to get reaped for more accurate reporting
1169 # this causes stale locks to get reaped for more accurate reporting
1169 try:
1170 try:
1170 l = method(False)
1171 l = method(False)
1171 except error.LockHeld:
1172 except error.LockHeld:
1172 l = None
1173 l = None
1173
1174
1174 if l:
1175 if l:
1175 l.release()
1176 l.release()
1176 else:
1177 else:
1177 try:
1178 try:
1178 stat = vfs.lstat(name)
1179 stat = vfs.lstat(name)
1179 age = now - stat.st_mtime
1180 age = now - stat.st_mtime
1180 user = util.username(stat.st_uid)
1181 user = util.username(stat.st_uid)
1181 locker = vfs.readlock(name)
1182 locker = vfs.readlock(name)
1182 if ":" in locker:
1183 if ":" in locker:
1183 host, pid = locker.split(':')
1184 host, pid = locker.split(':')
1184 if host == socket.gethostname():
1185 if host == socket.gethostname():
1185 locker = 'user %s, process %s' % (user, pid)
1186 locker = 'user %s, process %s' % (user, pid)
1186 else:
1187 else:
1187 locker = 'user %s, process %s, host %s' \
1188 locker = 'user %s, process %s, host %s' \
1188 % (user, pid, host)
1189 % (user, pid, host)
1189 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1190 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1190 return 1
1191 return 1
1191 except OSError as e:
1192 except OSError as e:
1192 if e.errno != errno.ENOENT:
1193 if e.errno != errno.ENOENT:
1193 raise
1194 raise
1194
1195
1195 ui.write(("%-6s free\n") % (name + ":"))
1196 ui.write(("%-6s free\n") % (name + ":"))
1196 return 0
1197 return 0
1197
1198
1198 held += report(repo.svfs, "lock", repo.lock)
1199 held += report(repo.svfs, "lock", repo.lock)
1199 held += report(repo.vfs, "wlock", repo.wlock)
1200 held += report(repo.vfs, "wlock", repo.wlock)
1200
1201
1201 return held
1202 return held
1202
1203
1203 @command('debugmergestate', [], '')
1204 @command('debugmergestate', [], '')
1204 def debugmergestate(ui, repo, *args):
1205 def debugmergestate(ui, repo, *args):
1205 """print merge state
1206 """print merge state
1206
1207
1207 Use --verbose to print out information about whether v1 or v2 merge state
1208 Use --verbose to print out information about whether v1 or v2 merge state
1208 was chosen."""
1209 was chosen."""
1209 def _hashornull(h):
1210 def _hashornull(h):
1210 if h == nullhex:
1211 if h == nullhex:
1211 return 'null'
1212 return 'null'
1212 else:
1213 else:
1213 return h
1214 return h
1214
1215
1215 def printrecords(version):
1216 def printrecords(version):
1216 ui.write(('* version %s records\n') % version)
1217 ui.write(('* version %s records\n') % version)
1217 if version == 1:
1218 if version == 1:
1218 records = v1records
1219 records = v1records
1219 else:
1220 else:
1220 records = v2records
1221 records = v2records
1221
1222
1222 for rtype, record in records:
1223 for rtype, record in records:
1223 # pretty print some record types
1224 # pretty print some record types
1224 if rtype == 'L':
1225 if rtype == 'L':
1225 ui.write(('local: %s\n') % record)
1226 ui.write(('local: %s\n') % record)
1226 elif rtype == 'O':
1227 elif rtype == 'O':
1227 ui.write(('other: %s\n') % record)
1228 ui.write(('other: %s\n') % record)
1228 elif rtype == 'm':
1229 elif rtype == 'm':
1229 driver, mdstate = record.split('\0', 1)
1230 driver, mdstate = record.split('\0', 1)
1230 ui.write(('merge driver: %s (state "%s")\n')
1231 ui.write(('merge driver: %s (state "%s")\n')
1231 % (driver, mdstate))
1232 % (driver, mdstate))
1232 elif rtype in 'FDC':
1233 elif rtype in 'FDC':
1233 r = record.split('\0')
1234 r = record.split('\0')
1234 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1235 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1235 if version == 1:
1236 if version == 1:
1236 onode = 'not stored in v1 format'
1237 onode = 'not stored in v1 format'
1237 flags = r[7]
1238 flags = r[7]
1238 else:
1239 else:
1239 onode, flags = r[7:9]
1240 onode, flags = r[7:9]
1240 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1241 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1241 % (f, rtype, state, _hashornull(hash)))
1242 % (f, rtype, state, _hashornull(hash)))
1242 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1243 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1243 ui.write((' ancestor path: %s (node %s)\n')
1244 ui.write((' ancestor path: %s (node %s)\n')
1244 % (afile, _hashornull(anode)))
1245 % (afile, _hashornull(anode)))
1245 ui.write((' other path: %s (node %s)\n')
1246 ui.write((' other path: %s (node %s)\n')
1246 % (ofile, _hashornull(onode)))
1247 % (ofile, _hashornull(onode)))
1247 elif rtype == 'f':
1248 elif rtype == 'f':
1248 filename, rawextras = record.split('\0', 1)
1249 filename, rawextras = record.split('\0', 1)
1249 extras = rawextras.split('\0')
1250 extras = rawextras.split('\0')
1250 i = 0
1251 i = 0
1251 extrastrings = []
1252 extrastrings = []
1252 while i < len(extras):
1253 while i < len(extras):
1253 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1254 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1254 i += 2
1255 i += 2
1255
1256
1256 ui.write(('file extras: %s (%s)\n')
1257 ui.write(('file extras: %s (%s)\n')
1257 % (filename, ', '.join(extrastrings)))
1258 % (filename, ', '.join(extrastrings)))
1258 elif rtype == 'l':
1259 elif rtype == 'l':
1259 labels = record.split('\0', 2)
1260 labels = record.split('\0', 2)
1260 labels = [l for l in labels if len(l) > 0]
1261 labels = [l for l in labels if len(l) > 0]
1261 ui.write(('labels:\n'))
1262 ui.write(('labels:\n'))
1262 ui.write((' local: %s\n' % labels[0]))
1263 ui.write((' local: %s\n' % labels[0]))
1263 ui.write((' other: %s\n' % labels[1]))
1264 ui.write((' other: %s\n' % labels[1]))
1264 if len(labels) > 2:
1265 if len(labels) > 2:
1265 ui.write((' base: %s\n' % labels[2]))
1266 ui.write((' base: %s\n' % labels[2]))
1266 else:
1267 else:
1267 ui.write(('unrecognized entry: %s\t%s\n')
1268 ui.write(('unrecognized entry: %s\t%s\n')
1268 % (rtype, record.replace('\0', '\t')))
1269 % (rtype, record.replace('\0', '\t')))
1269
1270
1270 # Avoid mergestate.read() since it may raise an exception for unsupported
1271 # Avoid mergestate.read() since it may raise an exception for unsupported
1271 # merge state records. We shouldn't be doing this, but this is OK since this
1272 # merge state records. We shouldn't be doing this, but this is OK since this
1272 # command is pretty low-level.
1273 # command is pretty low-level.
1273 ms = mergemod.mergestate(repo)
1274 ms = mergemod.mergestate(repo)
1274
1275
1275 # sort so that reasonable information is on top
1276 # sort so that reasonable information is on top
1276 v1records = ms._readrecordsv1()
1277 v1records = ms._readrecordsv1()
1277 v2records = ms._readrecordsv2()
1278 v2records = ms._readrecordsv2()
1278 order = 'LOml'
1279 order = 'LOml'
1279 def key(r):
1280 def key(r):
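# records whose type appears in 'LOml' sort first, in that order; any other
# record type sorts after them, ordered by its payload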
1280 idx = order.find(r[0])
1281 idx = order.find(r[0])
1281 if idx == -1:
1282 if idx == -1:
1282 return (1, r[1])
1283 return (1, r[1])
1283 else:
1284 else:
1284 return (0, idx)
1285 return (0, idx)
1285 v1records.sort(key=key)
1286 v1records.sort(key=key)
1286 v2records.sort(key=key)
1287 v2records.sort(key=key)
1287
1288
1288 if not v1records and not v2records:
1289 if not v1records and not v2records:
1289 ui.write(('no merge state found\n'))
1290 ui.write(('no merge state found\n'))
1290 elif not v2records:
1291 elif not v2records:
1291 ui.note(('no version 2 merge state\n'))
1292 ui.note(('no version 2 merge state\n'))
1292 printrecords(1)
1293 printrecords(1)
1293 elif ms._v1v2match(v1records, v2records):
1294 elif ms._v1v2match(v1records, v2records):
1294 ui.note(('v1 and v2 states match: using v2\n'))
1295 ui.note(('v1 and v2 states match: using v2\n'))
1295 printrecords(2)
1296 printrecords(2)
1296 else:
1297 else:
1297 ui.note(('v1 and v2 states mismatch: using v1\n'))
1298 ui.note(('v1 and v2 states mismatch: using v1\n'))
1298 printrecords(1)
1299 printrecords(1)
1299 if ui.verbose:
1300 if ui.verbose:
1300 printrecords(2)
1301 printrecords(2)
1301
1302
1302 @command('debugnamecomplete', [], _('NAME...'))
1303 @command('debugnamecomplete', [], _('NAME...'))
1303 def debugnamecomplete(ui, repo, *args):
1304 def debugnamecomplete(ui, repo, *args):
1304 '''complete "names" - tags, open branch names, bookmark names'''
1305 '''complete "names" - tags, open branch names, bookmark names'''
1305
1306
1306 names = set()
1307 names = set()
1307 # since we previously only listed open branches, we will handle that
1308 # since we previously only listed open branches, we will handle that
1308 # specially (after this for loop)
1309 # specially (after this for loop)
1309 for name, ns in repo.names.iteritems():
1310 for name, ns in repo.names.iteritems():
1310 if name != 'branches':
1311 if name != 'branches':
1311 names.update(ns.listnames(repo))
1312 names.update(ns.listnames(repo))
1312 names.update(tag for (tag, heads, tip, closed)
1313 names.update(tag for (tag, heads, tip, closed)
1313 in repo.branchmap().iterbranches() if not closed)
1314 in repo.branchmap().iterbranches() if not closed)
1314 completions = set()
1315 completions = set()
1315 if not args:
1316 if not args:
1316 args = ['']
1317 args = ['']
1317 for a in args:
1318 for a in args:
1318 completions.update(n for n in names if n.startswith(a))
1319 completions.update(n for n in names if n.startswith(a))
1319 ui.write('\n'.join(sorted(completions)))
1320 ui.write('\n'.join(sorted(completions)))
1320 ui.write('\n')
1321 ui.write('\n')
1321
1322
1322 @command('debugobsolete',
1323 @command('debugobsolete',
1323 [('', 'flags', 0, _('markers flag')),
1324 [('', 'flags', 0, _('markers flag')),
1324 ('', 'record-parents', False,
1325 ('', 'record-parents', False,
1325 _('record parent information for the precursor')),
1326 _('record parent information for the precursor')),
1326 ('r', 'rev', [], _('display markers relevant to REV')),
1327 ('r', 'rev', [], _('display markers relevant to REV')),
1327 ('', 'index', False, _('display index of the marker')),
1328 ('', 'index', False, _('display index of the marker')),
1328 ('', 'delete', [], _('delete markers specified by indices')),
1329 ('', 'delete', [], _('delete markers specified by indices')),
1329 ] + commands.commitopts2 + commands.formatteropts,
1330 ] + commands.commitopts2 + commands.formatteropts,
1330 _('[OBSOLETED [REPLACEMENT ...]]'))
1331 _('[OBSOLETED [REPLACEMENT ...]]'))
1331 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1332 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1332 """create arbitrary obsolete marker
1333 """create arbitrary obsolete marker
1333
1334
1334 With no arguments, displays the list of obsolescence markers."""
1335 With no arguments, displays the list of obsolescence markers."""
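# Usage sketch (hypothetical node ids): "hg debugobsolete OLDNODE NEWNODE"
# records a marker stating that OLDNODE was superseded by NEWNODE.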
1335
1336
1336 def parsenodeid(s):
1337 def parsenodeid(s):
1337 try:
1338 try:
1338 # We do not use revsingle/revrange functions here to accept
1339 # We do not use revsingle/revrange functions here to accept
1339 # arbitrary node identifiers, possibly not present in the
1340 # arbitrary node identifiers, possibly not present in the
1340 # local repository.
1341 # local repository.
1341 n = bin(s)
1342 n = bin(s)
1342 if len(n) != len(nullid):
1343 if len(n) != len(nullid):
1343 raise TypeError()
1344 raise TypeError()
1344 return n
1345 return n
1345 except TypeError:
1346 except TypeError:
1346 raise error.Abort('changeset references must be full hexadecimal '
1347 raise error.Abort('changeset references must be full hexadecimal '
1347 'node identifiers')
1348 'node identifiers')
1348
1349
1349 if opts.get('delete'):
1350 if opts.get('delete'):
1350 indices = []
1351 indices = []
1351 for v in opts.get('delete'):
1352 for v in opts.get('delete'):
1352 try:
1353 try:
1353 indices.append(int(v))
1354 indices.append(int(v))
1354 except ValueError:
1355 except ValueError:
1355 raise error.Abort(_('invalid index value: %r') % v,
1356 raise error.Abort(_('invalid index value: %r') % v,
1356 hint=_('use integers for indices'))
1357 hint=_('use integers for indices'))
1357
1358
1358 if repo.currenttransaction():
1359 if repo.currenttransaction():
1359 raise error.Abort(_('cannot delete obsmarkers in the middle '
1360 raise error.Abort(_('cannot delete obsmarkers in the middle '
1360 'of a transaction.'))
1361 'of a transaction.'))
1361
1362
1362 with repo.lock():
1363 with repo.lock():
1363 n = repair.deleteobsmarkers(repo.obsstore, indices)
1364 n = repair.deleteobsmarkers(repo.obsstore, indices)
1364 ui.write(_('deleted %i obsolescence markers\n') % n)
1365 ui.write(_('deleted %i obsolescence markers\n') % n)
1365
1366
1366 return
1367 return
1367
1368
1368 if precursor is not None:
1369 if precursor is not None:
1369 if opts['rev']:
1370 if opts['rev']:
1370 raise error.Abort('cannot select revision when creating marker')
1371 raise error.Abort('cannot select revision when creating marker')
1371 metadata = {}
1372 metadata = {}
1372 metadata['user'] = opts['user'] or ui.username()
1373 metadata['user'] = opts['user'] or ui.username()
1373 succs = tuple(parsenodeid(succ) for succ in successors)
1374 succs = tuple(parsenodeid(succ) for succ in successors)
1374 l = repo.lock()
1375 l = repo.lock()
1375 try:
1376 try:
1376 tr = repo.transaction('debugobsolete')
1377 tr = repo.transaction('debugobsolete')
1377 try:
1378 try:
1378 date = opts.get('date')
1379 date = opts.get('date')
1379 if date:
1380 if date:
1380 date = util.parsedate(date)
1381 date = util.parsedate(date)
1381 else:
1382 else:
1382 date = None
1383 date = None
1383 prec = parsenodeid(precursor)
1384 prec = parsenodeid(precursor)
1384 parents = None
1385 parents = None
1385 if opts['record_parents']:
1386 if opts['record_parents']:
1386 if prec not in repo.unfiltered():
1387 if prec not in repo.unfiltered():
1387 raise error.Abort('cannot use --record-parents on '
1388 raise error.Abort('cannot use --record-parents on '
1388 'unknown changesets')
1389 'unknown changesets')
1389 parents = repo.unfiltered()[prec].parents()
1390 parents = repo.unfiltered()[prec].parents()
1390 parents = tuple(p.node() for p in parents)
1391 parents = tuple(p.node() for p in parents)
1391 repo.obsstore.create(tr, prec, succs, opts['flags'],
1392 repo.obsstore.create(tr, prec, succs, opts['flags'],
1392 parents=parents, date=date,
1393 parents=parents, date=date,
1393 metadata=metadata)
1394 metadata=metadata)
1394 tr.close()
1395 tr.close()
1395 except ValueError as exc:
1396 except ValueError as exc:
1396 raise error.Abort(_('bad obsmarker input: %s') % exc)
1397 raise error.Abort(_('bad obsmarker input: %s') % exc)
1397 finally:
1398 finally:
1398 tr.release()
1399 tr.release()
1399 finally:
1400 finally:
1400 l.release()
1401 l.release()
1401 else:
1402 else:
1402 if opts['rev']:
1403 if opts['rev']:
1403 revs = scmutil.revrange(repo, opts['rev'])
1404 revs = scmutil.revrange(repo, opts['rev'])
1404 nodes = [repo[r].node() for r in revs]
1405 nodes = [repo[r].node() for r in revs]
1405 markers = list(obsolete.getmarkers(repo, nodes=nodes))
1406 markers = list(obsolete.getmarkers(repo, nodes=nodes))
1406 markers.sort(key=lambda x: x._data)
1407 markers.sort(key=lambda x: x._data)
1407 else:
1408 else:
1408 markers = obsolete.getmarkers(repo)
1409 markers = obsolete.getmarkers(repo)
1409
1410
1410 markerstoiter = markers
1411 markerstoiter = markers
1411 isrelevant = lambda m: True
1412 isrelevant = lambda m: True
1412 if opts.get('rev') and opts.get('index'):
1413 if opts.get('rev') and opts.get('index'):
1413 markerstoiter = obsolete.getmarkers(repo)
1414 markerstoiter = obsolete.getmarkers(repo)
1414 markerset = set(markers)
1415 markerset = set(markers)
1415 isrelevant = lambda m: m in markerset
1416 isrelevant = lambda m: m in markerset
1416
1417
1417 fm = ui.formatter('debugobsolete', opts)
1418 fm = ui.formatter('debugobsolete', opts)
1418 for i, m in enumerate(markerstoiter):
1419 for i, m in enumerate(markerstoiter):
1419 if not isrelevant(m):
1420 if not isrelevant(m):
1420 # marker can be irrelevant when we're iterating over a set
1421 # marker can be irrelevant when we're iterating over a set
1421 # of markers (markerstoiter) which is bigger than the set
1422 # of markers (markerstoiter) which is bigger than the set
1422 # of markers we want to display (markers)
1423 # of markers we want to display (markers)
1423 # this can happen if both --index and --rev options are
1424 # this can happen if both --index and --rev options are
1424 # provided and thus we need to iterate over all of the markers
1425 # provided and thus we need to iterate over all of the markers
1425 # to get the correct indices, but only display the ones that
1426 # to get the correct indices, but only display the ones that
1426 # are relevant to --rev value
1427 # are relevant to --rev value
1427 continue
1428 continue
1428 fm.startitem()
1429 fm.startitem()
1429 ind = i if opts.get('index') else None
1430 ind = i if opts.get('index') else None
1430 cmdutil.showmarker(fm, m, index=ind)
1431 cmdutil.showmarker(fm, m, index=ind)
1431 fm.end()
1432 fm.end()
1432
1433
1433 @command('debugpathcomplete',
1434 @command('debugpathcomplete',
1434 [('f', 'full', None, _('complete an entire path')),
1435 [('f', 'full', None, _('complete an entire path')),
1435 ('n', 'normal', None, _('show only normal files')),
1436 ('n', 'normal', None, _('show only normal files')),
1436 ('a', 'added', None, _('show only added files')),
1437 ('a', 'added', None, _('show only added files')),
1437 ('r', 'removed', None, _('show only removed files'))],
1438 ('r', 'removed', None, _('show only removed files'))],
1438 _('FILESPEC...'))
1439 _('FILESPEC...'))
1439 def debugpathcomplete(ui, repo, *specs, **opts):
1440 def debugpathcomplete(ui, repo, *specs, **opts):
1440 '''complete part or all of a tracked path
1441 '''complete part or all of a tracked path
1441
1442
1442 This command supports shells that offer path name completion. It
1443 This command supports shells that offer path name completion. It
1443 currently completes only files already known to the dirstate.
1444 currently completes only files already known to the dirstate.
1444
1445
1445 Completion extends only to the next path segment unless
1446 Completion extends only to the next path segment unless
1446 --full is specified, in which case entire paths are used.'''
1447 --full is specified, in which case entire paths are used.'''
1447
1448
1448 def complete(path, acceptable):
1449 def complete(path, acceptable):
1449 dirstate = repo.dirstate
1450 dirstate = repo.dirstate
1450 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1451 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1451 rootdir = repo.root + pycompat.ossep
1452 rootdir = repo.root + pycompat.ossep
1452 if spec != repo.root and not spec.startswith(rootdir):
1453 if spec != repo.root and not spec.startswith(rootdir):
1453 return [], []
1454 return [], []
1454 if os.path.isdir(spec):
1455 if os.path.isdir(spec):
1455 spec += '/'
1456 spec += '/'
1456 spec = spec[len(rootdir):]
1457 spec = spec[len(rootdir):]
1457 fixpaths = pycompat.ossep != '/'
1458 fixpaths = pycompat.ossep != '/'
1458 if fixpaths:
1459 if fixpaths:
1459 spec = spec.replace(pycompat.ossep, '/')
1460 spec = spec.replace(pycompat.ossep, '/')
1460 speclen = len(spec)
1461 speclen = len(spec)
1461 fullpaths = opts['full']
1462 fullpaths = opts['full']
1462 files, dirs = set(), set()
1463 files, dirs = set(), set()
1463 adddir, addfile = dirs.add, files.add
1464 adddir, addfile = dirs.add, files.add
1464 for f, st in dirstate.iteritems():
1465 for f, st in dirstate.iteritems():
1465 if f.startswith(spec) and st[0] in acceptable:
1466 if f.startswith(spec) and st[0] in acceptable:
1466 if fixpaths:
1467 if fixpaths:
1467 f = f.replace('/', pycompat.ossep)
1468 f = f.replace('/', pycompat.ossep)
1468 if fullpaths:
1469 if fullpaths:
1469 addfile(f)
1470 addfile(f)
1470 continue
1471 continue
1471 s = f.find(pycompat.ossep, speclen)
1472 s = f.find(pycompat.ossep, speclen)
1472 if s >= 0:
1473 if s >= 0:
1473 adddir(f[:s])
1474 adddir(f[:s])
1474 else:
1475 else:
1475 addfile(f)
1476 addfile(f)
1476 return files, dirs
1477 return files, dirs
1477
1478
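# dirstate entry states: 'n' normal, 'a' added, 'r' removed, 'm' merged;
# --normal matches both 'n' and 'm' entries below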
1478 acceptable = ''
1479 acceptable = ''
1479 if opts['normal']:
1480 if opts['normal']:
1480 acceptable += 'nm'
1481 acceptable += 'nm'
1481 if opts['added']:
1482 if opts['added']:
1482 acceptable += 'a'
1483 acceptable += 'a'
1483 if opts['removed']:
1484 if opts['removed']:
1484 acceptable += 'r'
1485 acceptable += 'r'
1485 cwd = repo.getcwd()
1486 cwd = repo.getcwd()
1486 if not specs:
1487 if not specs:
1487 specs = ['.']
1488 specs = ['.']
1488
1489
1489 files, dirs = set(), set()
1490 files, dirs = set(), set()
1490 for spec in specs:
1491 for spec in specs:
1491 f, d = complete(spec, acceptable or 'nmar')
1492 f, d = complete(spec, acceptable or 'nmar')
1492 files.update(f)
1493 files.update(f)
1493 dirs.update(d)
1494 dirs.update(d)
1494 files.update(dirs)
1495 files.update(dirs)
1495 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1496 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1496 ui.write('\n')
1497 ui.write('\n')
1497
1498
1498 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1499 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1499 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1500 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1500 '''access the pushkey key/value protocol
1501 '''access the pushkey key/value protocol
1501
1502
1502 With two args, list the keys in the given namespace.
1503 With two args, list the keys in the given namespace.
1503
1504
1504 With five args, set a key to new if it currently is set to old.
1505 With five args, set a key to new if it currently is set to old.
1505 Reports success or failure.
1506 Reports success or failure.
1506 '''
1507 '''
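# Usage sketch (hypothetical peer path): "hg debugpushkey ../peer namespaces"
# lists the keys in that namespace; with five arguments it attempts a
# conditional update and reports success or failure.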
1507
1508
1508 target = hg.peer(ui, {}, repopath)
1509 target = hg.peer(ui, {}, repopath)
1509 if keyinfo:
1510 if keyinfo:
1510 key, old, new = keyinfo
1511 key, old, new = keyinfo
1511 r = target.pushkey(namespace, key, old, new)
1512 r = target.pushkey(namespace, key, old, new)
1512 ui.status(str(r) + '\n')
1513 ui.status(str(r) + '\n')
1513 return not r
1514 return not r
1514 else:
1515 else:
1515 for k, v in sorted(target.listkeys(namespace).iteritems()):
1516 for k, v in sorted(target.listkeys(namespace).iteritems()):
1516 ui.write("%s\t%s\n" % (k.encode('string-escape'),
1517 ui.write("%s\t%s\n" % (k.encode('string-escape'),
1517 v.encode('string-escape')))
1518 v.encode('string-escape')))
1518
1519
1519 @command('debugpvec', [], _('A B'))
1520 @command('debugpvec', [], _('A B'))
1520 def debugpvec(ui, repo, a, b=None):
1521 def debugpvec(ui, repo, a, b=None):
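# compare the parent vectors ("pvecs") of two revisions; the relation symbol
# (=, >, <, |) follows the comparison semantics defined in the pvec module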
1521 ca = scmutil.revsingle(repo, a)
1522 ca = scmutil.revsingle(repo, a)
1522 cb = scmutil.revsingle(repo, b)
1523 cb = scmutil.revsingle(repo, b)
1523 pa = pvec.ctxpvec(ca)
1524 pa = pvec.ctxpvec(ca)
1524 pb = pvec.ctxpvec(cb)
1525 pb = pvec.ctxpvec(cb)
1525 if pa == pb:
1526 if pa == pb:
1526 rel = "="
1527 rel = "="
1527 elif pa > pb:
1528 elif pa > pb:
1528 rel = ">"
1529 rel = ">"
1529 elif pa < pb:
1530 elif pa < pb:
1530 rel = "<"
1531 rel = "<"
1531 elif pa | pb:
1532 elif pa | pb:
1532 rel = "|"
1533 rel = "|"
1533 ui.write(_("a: %s\n") % pa)
1534 ui.write(_("a: %s\n") % pa)
1534 ui.write(_("b: %s\n") % pb)
1535 ui.write(_("b: %s\n") % pb)
1535 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1536 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1536 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1537 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1537 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1538 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1538 pa.distance(pb), rel))
1539 pa.distance(pb), rel))
1539
1540
1540 @command('debugrebuilddirstate|debugrebuildstate',
1541 @command('debugrebuilddirstate|debugrebuildstate',
1541 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1542 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1542 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1543 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1543 'the working copy parent')),
1544 'the working copy parent')),
1544 ],
1545 ],
1545 _('[-r REV]'))
1546 _('[-r REV]'))
1546 def debugrebuilddirstate(ui, repo, rev, **opts):
1547 def debugrebuilddirstate(ui, repo, rev, **opts):
1547 """rebuild the dirstate as it would look like for the given revision
1548 """rebuild the dirstate as it would look like for the given revision
1548
1549
1549 If no revision is specified the first current parent will be used.
1550 If no revision is specified the first current parent will be used.
1550
1551
1551 The dirstate will be set to the files of the given revision.
1552 The dirstate will be set to the files of the given revision.
1552 The actual working directory content or existing dirstate
1553 The actual working directory content or existing dirstate
1553 information such as adds or removes is not considered.
1554 information such as adds or removes is not considered.
1554
1555
1555 ``minimal`` will only rebuild the dirstate status for files that claim to be
1556 ``minimal`` will only rebuild the dirstate status for files that claim to be
1556 tracked but are not in the parent manifest, or that exist in the parent
1557 tracked but are not in the parent manifest, or that exist in the parent
1557 manifest but are not in the dirstate. It will not change adds, removes, or
1558 manifest but are not in the dirstate. It will not change adds, removes, or
1558 modified files that are in the working copy parent.
1559 modified files that are in the working copy parent.
1559
1560
1560 One use of this command is to make the next :hg:`status` invocation
1561 One use of this command is to make the next :hg:`status` invocation
1561 check the actual file content.
1562 check the actual file content.
1562 """
1563 """
1563 ctx = scmutil.revsingle(repo, rev)
1564 ctx = scmutil.revsingle(repo, rev)
1564 with repo.wlock():
1565 with repo.wlock():
1565 dirstate = repo.dirstate
1566 dirstate = repo.dirstate
1566 changedfiles = None
1567 changedfiles = None
1567 # See command doc for what minimal does.
1568 # See command doc for what minimal does.
1568 if opts.get('minimal'):
1569 if opts.get('minimal'):
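# rebuild only entries for files that are in the manifest but missing from
# the dirstate, or in the dirstate (and not marked added) but missing from
# the manifest; everything else is left untouched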
1569 manifestfiles = set(ctx.manifest().keys())
1570 manifestfiles = set(ctx.manifest().keys())
1570 dirstatefiles = set(dirstate)
1571 dirstatefiles = set(dirstate)
1571 manifestonly = manifestfiles - dirstatefiles
1572 manifestonly = manifestfiles - dirstatefiles
1572 dsonly = dirstatefiles - manifestfiles
1573 dsonly = dirstatefiles - manifestfiles
1573 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1574 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1574 changedfiles = manifestonly | dsnotadded
1575 changedfiles = manifestonly | dsnotadded
1575
1576
1576 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1577 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1577
1578
1578 @command('debugrebuildfncache', [], '')
1579 @command('debugrebuildfncache', [], '')
1579 def debugrebuildfncache(ui, repo):
1580 def debugrebuildfncache(ui, repo):
1580 """rebuild the fncache file"""
1581 """rebuild the fncache file"""
1581 repair.rebuildfncache(ui, repo)
1582 repair.rebuildfncache(ui, repo)
1582
1583
1583 @command('debugrename',
1584 @command('debugrename',
1584 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1585 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1585 _('[-r REV] FILE'))
1586 _('[-r REV] FILE'))
1586 def debugrename(ui, repo, file1, *pats, **opts):
1587 def debugrename(ui, repo, file1, *pats, **opts):
1587 """dump rename information"""
1588 """dump rename information"""
1588
1589
1589 ctx = scmutil.revsingle(repo, opts.get('rev'))
1590 ctx = scmutil.revsingle(repo, opts.get('rev'))
1590 m = scmutil.match(ctx, (file1,) + pats, opts)
1591 m = scmutil.match(ctx, (file1,) + pats, opts)
1591 for abs in ctx.walk(m):
1592 for abs in ctx.walk(m):
1592 fctx = ctx[abs]
1593 fctx = ctx[abs]
1593 o = fctx.filelog().renamed(fctx.filenode())
1594 o = fctx.filelog().renamed(fctx.filenode())
1594 rel = m.rel(abs)
1595 rel = m.rel(abs)
1595 if o:
1596 if o:
1596 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1597 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1597 else:
1598 else:
1598 ui.write(_("%s not renamed\n") % rel)
1599 ui.write(_("%s not renamed\n") % rel)
1599
1600
1600 @command('debugrevlog', commands.debugrevlogopts +
1601 @command('debugrevlog', commands.debugrevlogopts +
1601 [('d', 'dump', False, _('dump index data'))],
1602 [('d', 'dump', False, _('dump index data'))],
1602 _('-c|-m|FILE'),
1603 _('-c|-m|FILE'),
1603 optionalrepo=True)
1604 optionalrepo=True)
1604 def debugrevlog(ui, repo, file_=None, **opts):
1605 def debugrevlog(ui, repo, file_=None, **opts):
1605 """show data and statistics about a revlog"""
1606 """show data and statistics about a revlog"""
1606 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1607 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1607
1608
1608 if opts.get("dump"):
1609 if opts.get("dump"):
1609 numrevs = len(r)
1610 numrevs = len(r)
1610 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1611 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1611 " rawsize totalsize compression heads chainlen\n"))
1612 " rawsize totalsize compression heads chainlen\n"))
1612 ts = 0
1613 ts = 0
1613 heads = set()
1614 heads = set()
1614
1615
1615 for rev in xrange(numrevs):
1616 for rev in xrange(numrevs):
1616 dbase = r.deltaparent(rev)
1617 dbase = r.deltaparent(rev)
1617 if dbase == -1:
1618 if dbase == -1:
1618 dbase = rev
1619 dbase = rev
1619 cbase = r.chainbase(rev)
1620 cbase = r.chainbase(rev)
1620 clen = r.chainlen(rev)
1621 clen = r.chainlen(rev)
1621 p1, p2 = r.parentrevs(rev)
1622 p1, p2 = r.parentrevs(rev)
1622 rs = r.rawsize(rev)
1623 rs = r.rawsize(rev)
1623 ts = ts + rs
1624 ts = ts + rs
1624 heads -= set(r.parentrevs(rev))
1625 heads -= set(r.parentrevs(rev))
1625 heads.add(rev)
1626 heads.add(rev)
1626 try:
1627 try:
1627 compression = ts / r.end(rev)
1628 compression = ts / r.end(rev)
1628 except ZeroDivisionError:
1629 except ZeroDivisionError:
1629 compression = 0
1630 compression = 0
1630 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1631 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1631 "%11d %5d %8d\n" %
1632 "%11d %5d %8d\n" %
1632 (rev, p1, p2, r.start(rev), r.end(rev),
1633 (rev, p1, p2, r.start(rev), r.end(rev),
1633 r.start(dbase), r.start(cbase),
1634 r.start(dbase), r.start(cbase),
1634 r.start(p1), r.start(p2),
1635 r.start(p1), r.start(p2),
1635 rs, ts, compression, len(heads), clen))
1636 rs, ts, compression, len(heads), clen))
1636 return 0
1637 return 0
1637
1638
1638 v = r.version
1639 v = r.version
1639 format = v & 0xFFFF
1640 format = v & 0xFFFF
1640 flags = []
1641 flags = []
1641 gdelta = False
1642 gdelta = False
1642 if v & revlog.REVLOGNGINLINEDATA:
1643 if v & revlog.REVLOGNGINLINEDATA:
1643 flags.append('inline')
1644 flags.append('inline')
1644 if v & revlog.REVLOGGENERALDELTA:
1645 if v & revlog.REVLOGGENERALDELTA:
1645 gdelta = True
1646 gdelta = True
1646 flags.append('generaldelta')
1647 flags.append('generaldelta')
1647 if not flags:
1648 if not flags:
1648 flags = ['(none)']
1649 flags = ['(none)']
1649
1650
1650 nummerges = 0
1651 nummerges = 0
1651 numfull = 0
1652 numfull = 0
1652 numprev = 0
1653 numprev = 0
1653 nump1 = 0
1654 nump1 = 0
1654 nump2 = 0
1655 nump2 = 0
1655 numother = 0
1656 numother = 0
1656 nump1prev = 0
1657 nump1prev = 0
1657 nump2prev = 0
1658 nump2prev = 0
1658 chainlengths = []
1659 chainlengths = []
1659
1660
1660 datasize = [None, 0, 0]
1661 datasize = [None, 0, 0]
1661 fullsize = [None, 0, 0]
1662 fullsize = [None, 0, 0]
1662 deltasize = [None, 0, 0]
1663 deltasize = [None, 0, 0]
1663 chunktypecounts = {}
1664 chunktypecounts = {}
1664 chunktypesizes = {}
1665 chunktypesizes = {}
1665
1666
1666 def addsize(size, l):
1667 def addsize(size, l):
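# accumulate (min, max, total) for the given size into the 3-element list l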
1667 if l[0] is None or size < l[0]:
1668 if l[0] is None or size < l[0]:
1668 l[0] = size
1669 l[0] = size
1669 if size > l[1]:
1670 if size > l[1]:
1670 l[1] = size
1671 l[1] = size
1671 l[2] += size
1672 l[2] += size
1672
1673
1673 numrevs = len(r)
1674 numrevs = len(r)
1674 for rev in xrange(numrevs):
1675 for rev in xrange(numrevs):
1675 p1, p2 = r.parentrevs(rev)
1676 p1, p2 = r.parentrevs(rev)
1676 delta = r.deltaparent(rev)
1677 delta = r.deltaparent(rev)
1677 if format > 0:
1678 if format > 0:
1678 addsize(r.rawsize(rev), datasize)
1679 addsize(r.rawsize(rev), datasize)
1679 if p2 != nullrev:
1680 if p2 != nullrev:
1680 nummerges += 1
1681 nummerges += 1
1681 size = r.length(rev)
1682 size = r.length(rev)
1682 if delta == nullrev:
1683 if delta == nullrev:
1683 chainlengths.append(0)
1684 chainlengths.append(0)
1684 numfull += 1
1685 numfull += 1
1685 addsize(size, fullsize)
1686 addsize(size, fullsize)
1686 else:
1687 else:
1687 chainlengths.append(chainlengths[delta] + 1)
1688 chainlengths.append(chainlengths[delta] + 1)
1688 addsize(size, deltasize)
1689 addsize(size, deltasize)
1689 if delta == rev - 1:
1690 if delta == rev - 1:
1690 numprev += 1
1691 numprev += 1
1691 if delta == p1:
1692 if delta == p1:
1692 nump1prev += 1
1693 nump1prev += 1
1693 elif delta == p2:
1694 elif delta == p2:
1694 nump2prev += 1
1695 nump2prev += 1
1695 elif delta == p1:
1696 elif delta == p1:
1696 nump1 += 1
1697 nump1 += 1
1697 elif delta == p2:
1698 elif delta == p2:
1698 nump2 += 1
1699 nump2 += 1
1699 elif delta != nullrev:
1700 elif delta != nullrev:
1700 numother += 1
1701 numother += 1
1701
1702
1702 # Obtain data on the raw chunks in the revlog.
1703 # Obtain data on the raw chunks in the revlog.
1703 chunk = r._chunkraw(rev, rev)[1]
1704 chunk = r._chunkraw(rev, rev)[1]
1704 if chunk:
1705 if chunk:
1705 chunktype = chunk[0]
1706 chunktype = chunk[0]
1706 else:
1707 else:
1707 chunktype = 'empty'
1708 chunktype = 'empty'
1708
1709
1709 if chunktype not in chunktypecounts:
1710 if chunktype not in chunktypecounts:
1710 chunktypecounts[chunktype] = 0
1711 chunktypecounts[chunktype] = 0
1711 chunktypesizes[chunktype] = 0
1712 chunktypesizes[chunktype] = 0
1712
1713
1713 chunktypecounts[chunktype] += 1
1714 chunktypecounts[chunktype] += 1
1714 chunktypesizes[chunktype] += size
1715 chunktypesizes[chunktype] += size
1715
1716
1716 # Adjust size min value for empty cases
1717 # Adjust size min value for empty cases
1717 for size in (datasize, fullsize, deltasize):
1718 for size in (datasize, fullsize, deltasize):
1718 if size[0] is None:
1719 if size[0] is None:
1719 size[0] = 0
1720 size[0] = 0
1720
1721
1721 numdeltas = numrevs - numfull
1722 numdeltas = numrevs - numfull
1722 numoprev = numprev - nump1prev - nump2prev
1723 numoprev = numprev - nump1prev - nump2prev
1723 totalrawsize = datasize[2]
1724 totalrawsize = datasize[2]
1724 datasize[2] /= numrevs
1725 datasize[2] /= numrevs
1725 fulltotal = fullsize[2]
1726 fulltotal = fullsize[2]
1726 fullsize[2] /= numfull
1727 fullsize[2] /= numfull
1727 deltatotal = deltasize[2]
1728 deltatotal = deltasize[2]
1728 if numrevs - numfull > 0:
1729 if numrevs - numfull > 0:
1729 deltasize[2] /= numrevs - numfull
1730 deltasize[2] /= numrevs - numfull
1730 totalsize = fulltotal + deltatotal
1731 totalsize = fulltotal + deltatotal
1731 avgchainlen = sum(chainlengths) / numrevs
1732 avgchainlen = sum(chainlengths) / numrevs
1732 maxchainlen = max(chainlengths)
1733 maxchainlen = max(chainlengths)
1733 compratio = 1
1734 compratio = 1
1734 if totalsize:
1735 if totalsize:
1735 compratio = totalrawsize / totalsize
1736 compratio = totalrawsize / totalsize
1736
1737
1737 basedfmtstr = '%%%dd\n'
1738 basedfmtstr = '%%%dd\n'
1738 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
1739 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
1739
1740
1740 def dfmtstr(max):
1741 def dfmtstr(max):
1741 return basedfmtstr % len(str(max))
1742 return basedfmtstr % len(str(max))
1742 def pcfmtstr(max, padding=0):
1743 def pcfmtstr(max, padding=0):
1743 return basepcfmtstr % (len(str(max)), ' ' * padding)
1744 return basepcfmtstr % (len(str(max)), ' ' * padding)
1744
1745
1745 def pcfmt(value, total):
1746 def pcfmt(value, total):
1746 if total:
1747 if total:
1747 return (value, 100 * float(value) / total)
1748 return (value, 100 * float(value) / total)
1748 else:
1749 else:
1749 return value, 100.0
1750 return value, 100.0
1750
1751
    ui.write(('format : %d\n') % format)
    ui.write(('flags : %s\n') % ', '.join(flags))

    ui.write('\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
    ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
    ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
    ui.write(('revision size : ') + fmt2 % totalsize)
    ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
    ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        if chunktype == 'empty':
            return ' %s : ' % chunktype
        elif chunktype in string.ascii_letters:
            return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return ' 0x%s : ' % hex(chunktype)

    ui.write('\n')
    ui.write(('chunks : ') + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.write(('chunks size : ') + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write('\n')
    fmt = dfmtstr(max(avgchainlen, compratio))
    ui.write(('avg chain length : ') + fmt % avgchainlen)
    ui.write(('max chain length : ') + fmt % maxchainlen)
    ui.write(('compression ratio : ') + fmt % compratio)

    if format > 0:
        ui.write('\n')
        ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
                 % tuple(datasize))
        ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
                 % tuple(fullsize))
        ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
                 % tuple(deltasize))

    if numdeltas > 0:
        ui.write('\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
        if numprev > 0:
            ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
                                                            numprev))
            ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
                                                            numprev))
            ui.write((' other : ') + fmt2 % pcfmt(numoprev,
                                                  numprev))
        if gdelta:
            ui.write(('deltas against p1 : ')
                     + fmt % pcfmt(nump1, numdeltas))
            ui.write(('deltas against p2 : ')
                     + fmt % pcfmt(nump2, numdeltas))
            ui.write(('deltas against other : ') + fmt % pcfmt(numother,
                                                               numdeltas))

@command('debugrevspec',
    [('', 'optimize', None,
      _('print parsed tree after optimizing (DEPRECATED)')),
     ('p', 'show-stage', [],
      _('print parsed tree at the given stage'), _('NAME')),
     ('', 'no-optimized', False, _('evaluate tree without optimization')),
     ('', 'verify-optimized', False, _('verify optimized result')),
     ],
    ('REVSPEC'))
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use the -p/--show-stage option to print the parsed tree at the given
    stages. Use -p all to print the tree at every stage.

    Use --verify-optimized to compare the optimized result with the
    unoptimized one. Returns 1 if the optimized result differs.
    """
    stages = [
        ('parsed', lambda tree: tree),
        ('expanded', lambda tree: revsetlang.expandaliases(ui, tree)),
        ('concatenated', revsetlang.foldconcat),
        ('analyzed', revsetlang.analyze),
        ('optimized', revsetlang.optimize),
    ]
    if opts['no_optimized']:
        stages = stages[:-1]
    if opts['verify_optimized'] and opts['no_optimized']:
        raise error.Abort(_('cannot use --verify-optimized with '
                            '--no-optimized'))
    stagenames = set(n for n, f in stages)

    showalways = set()
    showchanged = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
        showchanged.update(['expanded', 'concatenated'])
    if opts['optimize']:
        showalways.add('optimized')
    if opts['show_stage'] and opts['optimize']:
        raise error.Abort(_('cannot use --optimize with --show-stage'))
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=repo.__contains__)
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(revsetlang.prettyformat(tree), "\n")
            printedtree = tree

    if opts['verify_optimized']:
        arevs = revset.makematcher(treebystage['analyzed'])(repo)
        brevs = revset.makematcher(treebystage['optimized'])(repo)
        if ui.verbose:
            ui.note(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
            ui.note(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        ui.write(('--- analyzed\n'), label='diff.file_a')
        ui.write(('+++ optimized\n'), label='diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write('-%s\n' % c, label='diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write('+%s\n' % c, label='diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(' %s\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if ui.verbose:
        ui.note(("* set:\n"), smartset.prettyformat(revs), "\n")
    for c in revs:
        ui.write("%s\n" % c)

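# A minimal sketch (illustrative; the revset expression below is hypothetical)
# of the cumulative stage pipeline driven by debugrevspec above -- each stage
# receives the tree produced by the stage before it:
#
#   tree = revsetlang.parse('heads(::. and not merge())')
#   for name, transform in stages:
#       tree = transform(tree)   # 'expanded', 'concatenated', 'analyzed', ...
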
@command('debugsetparents', [], _('REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """

    r1 = scmutil.revsingle(repo, rev1).node()
    r2 = scmutil.revsingle(repo, rev2, 'null').node()

    with repo.wlock():
        repo.setparents(r1, r2)

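# Illustrative usage (hypothetical example, not from the original file): make
# the working directory a child of two existing heads without touching any
# files, so that the next commit records a merge of REV1 and REV2:
#
#   $ hg debugsetparents REV1 REV2
#   $ hg commit -m 'recorded merge'
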
@command('debugsub',
    [('r', 'rev', '',
      _('revision to check'), _('REV'))],
    _('[-r REV] [REV]'))
def debugsub(ui, repo, rev=None):
    ctx = scmutil.revsingle(repo, rev, None)
    for k, v in sorted(ctx.substate.items()):
        ui.write(('path %s\n') % k)
        ui.write((' source %s\n') % v[0])
        ui.write((' revision %s\n') % v[1])

@command('debugsuccessorssets',
    [],
    _('[REV]'))
def debugsuccessorssets(ui, repo, *revs):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors is called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    ctx2str = str
    node2str = short
    if ui.debug():
        def ctx2str(ctx):
            return ctx.hex()
        node2str = hex
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write('%s\n' % ctx2str(ctx))
        for succsset in obsolete.successorssets(repo, ctx.node(), cache):
            if succsset:
                ui.write(' ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(' ')
                    ui.write(node2str(node))
            ui.write('\n')

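# Shape of the data printed above (a sketch, assuming obsolescence markers
# exist for the queried revision): obsolete.successorssets() returns a list
# of successors sets, each itself a sequence of successor nodes, e.g.
#
#   [[succ]]              # plain rewrite
#   [[s1, s2]]            # split into two changesets
#   [[sA], [sB1, sB2]]    # divergent rewrites (one of them split)
#   []                    # pruned (no successors at all)
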
@command('debugtemplate',
    [('r', 'rev', [], _('apply template on changesets'), _('REV')),
     ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
    _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        if repo is None:
            raise error.RepoError(_('there is no Mercurial repository here '
                                    '(.hg not found)'))
        revs = scmutil.revrange(repo, opts['rev'])

    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split('=', 1))
            if not k:
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_('malformed keyword definition: %s') % d)

    if ui.verbose:
        aliases = ui.configitems('templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), '\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')

    mapfile = None
    if revs is None:
        k = 'debugtemplate'
        t = formatter.maketemplater(ui, k, tmpl)
        ui.write(templater.stringify(t(k, **props)))
    else:
        displayer = cmdutil.changeset_templater(ui, repo, None, opts, tmpl,
                                                mapfile, buffered=False)
        for r in revs:
            displayer.show(repo[r], **props)
        displayer.close()

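# Sketch of how the '-D KEY=VALUE' definitions above are parsed (illustrative
# values): the string is cut on the first '=' only and both halves are
# stripped of surrounding whitespace.
#
#   d = ' branch = stable=yes '
#   k, v = (e.strip() for e in d.split('=', 1))   # ('branch', 'stable=yes')
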
@command('debugupgraderepo', [
    ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
    ('', 'run', False, _('performs an upgrade')),
])
def debugupgraderepo(ui, repo, run=False, optimize=None):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines,
    this should complete almost instantaneously and the chances of a consumer
    being unable to access the repository should be low.
    """
    return repair.upgraderepo(ui, repo, run=run, optimize=optimize)

@command('debugwalk', commands.walkopts, _('[OPTION]... [FILE]...'),
         inferrepo=True)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    m = scmutil.match(repo[None], pats, opts)
    items = list(repo.walk(m))
    if not items:
        return
    f = lambda fn: fn
    if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
        f = lambda fn: util.normpath(fn)
    fmt = 'f %%-%ds %%-%ds %%s' % (
        max([len(abs) for abs in items]),
        max([len(m.rel(abs)) for abs in items]))
    for abs in items:
        line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
        ui.write("%s\n" % line.rstrip())

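# Worked example (hypothetical file names): with items ['a.txt', 'sub/b.txt']
# and relative paths at most 9 characters wide, the pattern above becomes
# 'f %-9s %-9s %s', i.e. both path columns are left-justified to the width of
# the widest entry.
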
@command('debugwireargs',
    [('', 'three', '', 'three'),
     ('', 'four', '', 'four'),
     ('', 'five', '', 'five'),
     ] + commands.remoteopts,
    _('REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True)
def debugwireargs(ui, repopath, *vals, **opts):
    repo = hg.peer(ui, opts, repopath)
    for opt in commands.remoteopts:
        del opts[opt[1]]
    args = {}
    for k, v in opts.iteritems():
        if v:
            args[k] = v
    # run twice to check that we don't mess up the stream for the next command
    res1 = repo.debugwireargs(*vals, **args)
    res2 = repo.debugwireargs(*vals, **args)
    ui.write("%s\n" % res1)
    if res1 != res2:
        ui.warn("%s\n" % res2)