debugcommands: use repo[None].walk instead of repo.walk
Augie Fackler
r32363:a275186b default
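The hunk that actually makes this change sits further down in mercurial/debugcommands.py than the portion reproduced below. As a rough, hypothetical sketch of the pattern the summary describes (the helper name and surrounding context are illustrative, not taken from the real diff): build a matcher against the working-directory context and walk that context directly via repo[None].walk, instead of going through the older repo.walk wrapper.

    from mercurial import scmutil

    def walkworkingdir(repo, pats, opts):
        # Hypothetical helper, not part of this changeset: match against the
        # working-directory context (repo[None]) and walk it directly,
        # i.e. repo[None].walk(m) rather than the legacy repo.walk(m) wrapper.
        m = scmutil.match(repo[None], pats, opts)
        return list(repo[None].walk(m))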
@@ -1,2204 +1,2204 @@
# debugcommands.py - command processing for debug* commands
#
# Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import difflib
import errno
import operator
import os
import random
import socket
import string
import sys
import tempfile
import time

from .i18n import _
from .node import (
    bin,
    hex,
    nullhex,
    nullid,
    nullrev,
    short,
)
from . import (
    bundle2,
    changegroup,
    cmdutil,
    color,
    commands,
    context,
    dagparser,
    dagutil,
    encoding,
    error,
    exchange,
    extensions,
    filemerge,
    fileset,
    formatter,
    hg,
    localrepo,
    lock as lockmod,
    merge as mergemod,
    obsolete,
    policy,
    pvec,
    pycompat,
    registrar,
    repair,
    revlog,
    revset,
    revsetlang,
    scmutil,
    setdiscovery,
    simplemerge,
    smartset,
    sslutil,
    streamclone,
    templater,
    treediscovery,
    upgrade,
    util,
    vfs as vfsmod,
)

release = lockmod.release

# We reuse the command table from commands because it is easier than
# teaching dispatch about multiple tables.
command = registrar.command(commands.table)

@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    if len(args) == 3:
        index, rev1, rev2 = args
        r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
        lookup = r.lookup
    elif len(args) == 2:
        if not repo:
            raise error.Abort(_('there is no Mercurial repository here '
                                '(.hg not found)'))
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_('either two or three arguments required'))
    a = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write('%d:%s\n' % (r.rev(a), hex(a)))

@command('debugapplystreamclonebundle', [], 'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    f = hg.openpath(ui, fname)
    gen = exchange.readbundle(ui, f, fname)
    gen.apply(repo)

@command('debugbuilddag',
    [('m', 'mergeable-file', None, _('add single file mergeable changes')),
    ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
    ('n', 'new-file', None, _('add new file at each rev'))],
    _('[OPTION]... [TEXT]'))
def debugbuilddag(ui, repo, text=None,
                  mergeable_file=False,
                  overwritten_file=False,
                  new_file=False):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_("reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_('repository is not empty'))

    # determine number of revs in DAG
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == 'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
        initialmergedlines.append("")

    tags = []

    wlock = lock = tr = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        tr = repo.transaction("builddag")

        at = -1
        atbranch = 'default'
        nodeids = []
        id = 0
        ui.progress(_('building'), id, unit=_('revisions'), total=total)
        for type, data in dagparser.parsedag(text):
            if type == 'n':
                ui.note(('node %s\n' % str(data)))
                id, ps = data

                files = []
                fctxs = {}

                p2 = None
                if mergeable_file:
                    fn = "mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [x[fn].data() for x in (pa, p1,
                                                                     p2)]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append("")
                    elif at > 0:
                        ml = p1[fn].data().split("\n")
                    else:
                        ml = initialmergedlines
                    ml[id * linesperrev] += " r%i" % id
                    mergedtext = "\n".join(ml)
                    files.append(fn)
                    fctxs[fn] = context.memfilectx(repo, fn, mergedtext)

                if overwritten_file:
                    fn = "of"
                    files.append(fn)
                    fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)

                if new_file:
                    fn = "nf%i" % id
                    files.append(fn)
                    fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith("nf"):
                                files.append(fn)
                                fctxs[fn] = p2[fn]

                def fctxfn(repo, cx, path):
                    return fctxs.get(path)

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
                                    date=(id, 0),
                                    user="debugbuilddag",
                                    extra={'branch': atbranch})
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == 'l':
                id, name = data
                ui.note(('tag %s\n' % name))
                tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == 'a':
                ui.note(('branch %s\n' % data))
                atbranch = data
            ui.progress(_('building'), id, unit=_('revisions'), total=total)
        tr.close()

        if tags:
            repo.vfs.write("localtags", "".join(tags))
    finally:
        ui.progress(_('building'), None)
        release(tr, lock, wlock)

def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    indent_string = ' ' * indent
    if all:
        ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
                 % indent_string)

        def showchunks(named):
            ui.write("\n%s%s\n" % (indent_string, named))
            chain = None
            for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
                node = chunkdata['node']
                p1 = chunkdata['p1']
                p2 = chunkdata['p2']
                cs = chunkdata['cs']
                deltabase = chunkdata['deltabase']
                delta = chunkdata['delta']
                ui.write("%s%s %s %s %s %s %s\n" %
                         (indent_string, hex(node), hex(p1), hex(p2),
                          hex(cs), hex(deltabase), len(delta)))
                chain = node

        chunkdata = gen.changelogheader()
        showchunks("changelog")
        chunkdata = gen.manifestheader()
        showchunks("manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata['filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_('use debugbundle2 for this file'))
        chunkdata = gen.changelogheader()
        chain = None
        for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
            node = chunkdata['node']
            ui.write("%s%s\n" % (indent_string, hex(node)))
            chain = node

def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_('not a bundle2 file'))
    ui.write(('Stream params: %s\n' % repr(gen.params)))
    for part in gen.iterparts():
        ui.write('%s -- %r\n' % (part.type, repr(part.params)))
        if part.type == 'changegroup':
            version = part.params.get('version', '01')
            cg = changegroup.getunbundler(version, part, 'UN')
            _debugchangegroup(ui, cg, all=all, indent=4, **opts)

@command('debugbundle',
    [('a', 'all', None, _('show all details')),
    ('', 'spec', None, _('print the bundlespec of the bundle'))],
    _('FILE'),
    norepo=True)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            spec = exchange.getbundlespec(ui, f)
            ui.write('%s\n' % spec)
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)

@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        error = _(".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(error)

@command('debugcolor',
    [('', 'style', None, _('show all configured styles'))],
    'hg debugcolor')
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.write(('color mode: %s\n') % ui._colormode)
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    else:
        return _debugdisplaycolor(ui)

def _debugdisplaycolor(ui):
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for k, v in ui.configitems('color'):
            if k.startswith('color.'):
                ui._styles[k] = k[6:]
            elif k.startswith('terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_('available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(),
                   key=lambda i: ('_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(('%s\n') % colorname, label=label)

def _debugdisplaystyle(ui):
    ui.write(_('available style:\n'))
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write('%s' % label, label=label)
        if effects:
            # 50
            ui.write(': ')
            ui.write(' ' * (max(0, width - len(label))))
            ui.write(', '.join(ui.label(e, e) for e in effects.split()))
        ui.write('\n')

@command('debugcommands', [], _('[COMMAND]'), norepo=True)
def debugcommands(ui, cmd='', *args):
    """list all available commands and options"""
    for cmd, vals in sorted(commands.table.iteritems()):
        cmd = cmd.split('|')[0].strip('^')
        opts = ', '.join([i[1] for i in vals[1]])
        ui.write('%s: %s\n' % (cmd, opts))

@command('debugcomplete',
    [('o', 'options', None, _('show the command options'))],
    _('[-o] CMD'),
    norepo=True)
def debugcomplete(ui, cmd='', **opts):
    """returns the completion list associated with the given command"""

    if opts.get('options'):
        options = []
        otables = [commands.globalopts]
        if cmd:
            aliases, entry = cmdutil.findcmd(cmd, commands.table, False)
            otables.append(entry[1])
        for t in otables:
            for o in t:
                if "(DEPRECATED)" in o[3]:
                    continue
                if o[0]:
                    options.append('-%s' % o[0])
                options.append('--%s' % o[1])
        ui.write("%s\n" % "\n".join(options))
        return

    cmdlist, unused_allcmds = cmdutil.findpossible(cmd, commands.table)
    if ui.verbose:
        cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
    ui.write("%s\n" % "\n".join(sorted(cmdlist)))

@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))

@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
    ('b', 'branches', None, _('annotate with branch names')),
    ('', 'dots', None, _('use dots for runs')),
    ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                             file_)
        revs = set((int(r) for r in revs))
        def events():
            for r in rlog:
                yield 'n', (r, list(p for p in rlog.parentrevs(r)
                                    if p != -1))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            b = "default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(p for p in cl.parentrevs(r)
                                    if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
        ui.write("\n")

@command('debugdata', commands.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
    try:
        ui.write(r.revision(r.lookup(rev), raw=True))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)

@command('debugdate',
    [('e', 'extended', None, _('try extended date formats'))],
    _('[-e] DATE [RANGE]'),
    norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        d = util.parsedate(date, util.extendeddateformats)
    else:
        d = util.parsedate(date)
    ui.write(("internal: %s %s\n") % d)
    ui.write(("standard: %s\n") % util.datestr(d))
    if range:
        m = util.matchdate(range)
        ui.write(("match: %s\n") % m(d[0]))

@command('debugdeltachain',
    commands.debugrevlogopts + commands.formatteropts,
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain
    """
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    generaldelta = r.version & revlog.FLAG_GENERALDELTA

    def revinfo(rev):
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            if e[3] == e[5]:
                deltatype = 'p1'
            elif e[3] == e[6]:
                deltatype = 'p2'
            elif e[3] == rev - 1:
                deltatype = 'prev'
            elif e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            if e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    fm.plain(' rev chain# chainlen prev delta '
             'size rawsize chainsize ratio lindist extradist '
             'extraratio\n')

    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = r.start(chainbase)
        revstart = r.start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        chainratio = float(chainsize) / float(uncomp)
        extraratio = float(extradist) / float(chainsize)

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f\n',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)

    fm.end()

@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime')),
    ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = opts.get('nodates')
    datesort = opts.get('datesort')

    timestr = ""
    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        if ent[3] == -1:
            timestr = 'unset '
        elif nodates:
            timestr = 'set '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
                                    time.localtime(ent[3]))
        if ent[1] & 0o20000:
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))

@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
    ('', 'nonheads', None,
     _('use old-style discovery with non-heads included')),
    ] + commands.remoteopts,
    _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
                                      opts.get('branch'))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(localheads, remoteheads, remote=remote):
        if opts.get('old'):
            if localheads:
                raise error.Abort('cannot use localheads with old style '
                                  'discovery')
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))
                dag = dagutil.revlogdag(repo.changelog)
                all = dag.ancestorset(dag.internalizeall(common))
                common = dag.externalizeall(dag.headsetofconnecteds(all))
        else:
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
        common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
        if lheads <= common:
            ui.write(("local is subset\n"))
        elif rheads <= common:
            ui.write(("remote is subset\n"))

    serverlogs = opts.get('serverlog')
    if serverlogs:
        for filename in serverlogs:
            with open(filename, 'r') as logfile:
                line = logfile.readline()
                while line:
                    parts = line.strip().split(';')
                    op = parts[1]
                    if op == 'cg':
                        pass
                    elif op == 'cgss':
                        doit(parts[2].split(' '), parts[3].split(' '))
                    elif op == 'unb':
                        doit(parts[3].split(' '), parts[2].split(' '))
                    line = logfile.readline()
    else:
        remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
                                                 opts.get('remote_head'))
        localrevs = opts.get('local_head')
        doit(localrevs, remoterevs)

@command('debugextensions', commands.formatteropts, [], norepo=True)
def debugextensions(ui, **opts):
    '''show information about active extensions'''
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter('debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = pycompat.fsencode(extmod.__file__)
        if isinternal:
            exttestedwith = [] # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', '').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write('name', '%s\n', extname)
        else:
            fm.write('name', '%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain('\n')
            elif not exttestedwith:
                fm.plain(_(' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(' (%s!)\n' % lasttestedversion)

        fm.condwrite(ui.verbose and extsource, 'source',
                     _(' location: %s\n'), extsource or "")

        if ui.verbose:
            fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
                     _(' tested with: %s\n'),
                     fm.formatlist(exttestedwith, name='ver'))

        fm.condwrite(ui.verbose and extbuglink, 'buglink',
                     _(' bug reporting: %s\n'), extbuglink or "")

    fm.end()

@command('debugfileset',
    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
    _('[-r REV] FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)
    if ui.verbose:
        tree = fileset.parse(expr)
        ui.note(fileset.prettyformat(tree), "\n")

    for f in ctx.getfileset(expr):
        ui.write("%s\n" % f)

@command('debugfsinfo', [], _('[PATH]'), norepo=True)
def debugfsinfo(ui, path="."):
    """show information detected about current filesystem"""
    ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
    ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
    ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
    ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
    casesensitive = '(unknown)'
    try:
        with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
            casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
    except OSError:
        pass
    ui.write(('case-sensitive: %s\n') % casesensitive)

@command('debuggetbundle',
    [('H', 'head', [], _('id of head node'), _('ID')),
    ('C', 'common', [], _('id of common node'), _('ID')),
    ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
    _('REPO FILE [-H|-C ID]...'),
    norepo=True)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('getbundle'):
        raise error.Abort("getbundle() not supported by target repository")
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = repo.getbundle('debug', **args)

    bundletype = opts.get('type', 'bzip2').lower()
    btypes = {'none': 'HG10UN',
              'bzip2': 'HG10BZ',
              'gzip': 'HG10GZ',
              'bundle2': 'HG20'}
    bundletype = btypes.get(bundletype)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_('unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)

@command('debugignore', [], '[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        includepat = getattr(ignore, 'includepat', None)
        if includepat is not None:
            ui.write("%s\n" % includepat)
        else:
            raise error.Abort(_("no ignore patterns found"))
    else:
        for f in files:
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != '.':
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    for p in util.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_("%s is ignored\n") % f)
                else:
                    ui.write(_("%s is ignored because of "
                               "containing folder %s\n")
                             % (f, ignored))
                ignorefile, lineno, line = ignoredata
                ui.write(_("(ignore rule in %s, line %d: '%s')\n")
                         % (ignorefile, lineno, line))
            else:
                ui.write(_("%s is not ignored\n") % f)

885 @command('debugindex', commands.debugrevlogopts +
885 @command('debugindex', commands.debugrevlogopts +
886 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
886 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
887 _('[-f FORMAT] -c|-m|FILE'),
887 _('[-f FORMAT] -c|-m|FILE'),
888 optionalrepo=True)
888 optionalrepo=True)
889 def debugindex(ui, repo, file_=None, **opts):
889 def debugindex(ui, repo, file_=None, **opts):
890 """dump the contents of an index file"""
890 """dump the contents of an index file"""
891 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
891 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
892 format = opts.get('format', 0)
892 format = opts.get('format', 0)
893 if format not in (0, 1):
893 if format not in (0, 1):
894 raise error.Abort(_("unknown format %d") % format)
894 raise error.Abort(_("unknown format %d") % format)
895
895
896 generaldelta = r.version & revlog.FLAG_GENERALDELTA
896 generaldelta = r.version & revlog.FLAG_GENERALDELTA
897 if generaldelta:
897 if generaldelta:
898 basehdr = ' delta'
898 basehdr = ' delta'
899 else:
899 else:
900 basehdr = ' base'
900 basehdr = ' base'
901
901
902 if ui.debugflag:
902 if ui.debugflag:
903 shortfn = hex
903 shortfn = hex
904 else:
904 else:
905 shortfn = short
905 shortfn = short
906
906
907 # There might not be anything in r, so have a sane default
907 # There might not be anything in r, so have a sane default
908 idlen = 12
908 idlen = 12
909 for i in r:
909 for i in r:
910 idlen = len(shortfn(r.node(i)))
910 idlen = len(shortfn(r.node(i)))
911 break
911 break
912
912
913 if format == 0:
913 if format == 0:
914 ui.write((" rev offset length " + basehdr + " linkrev"
914 ui.write((" rev offset length " + basehdr + " linkrev"
915 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
915 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
916 elif format == 1:
916 elif format == 1:
917 ui.write((" rev flag offset length"
917 ui.write((" rev flag offset length"
918 " size " + basehdr + " link p1 p2"
918 " size " + basehdr + " link p1 p2"
919 " %s\n") % "nodeid".rjust(idlen))
919 " %s\n") % "nodeid".rjust(idlen))
920
920
921 for i in r:
921 for i in r:
922 node = r.node(i)
922 node = r.node(i)
923 if generaldelta:
923 if generaldelta:
924 base = r.deltaparent(i)
924 base = r.deltaparent(i)
925 else:
925 else:
926 base = r.chainbase(i)
926 base = r.chainbase(i)
927 if format == 0:
927 if format == 0:
928 try:
928 try:
929 pp = r.parents(node)
929 pp = r.parents(node)
930 except Exception:
930 except Exception:
931 pp = [nullid, nullid]
931 pp = [nullid, nullid]
932 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
932 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
933 i, r.start(i), r.length(i), base, r.linkrev(i),
933 i, r.start(i), r.length(i), base, r.linkrev(i),
934 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
934 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
935 elif format == 1:
935 elif format == 1:
936 pr = r.parentrevs(i)
936 pr = r.parentrevs(i)
937 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
937 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
938 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
938 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
939 base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
939 base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
940
940
941 @command('debugindexdot', commands.debugrevlogopts,
941 @command('debugindexdot', commands.debugrevlogopts,
942 _('-c|-m|FILE'), optionalrepo=True)
942 _('-c|-m|FILE'), optionalrepo=True)
943 def debugindexdot(ui, repo, file_=None, **opts):
943 def debugindexdot(ui, repo, file_=None, **opts):
944 """dump an index DAG as a graphviz dot file"""
944 """dump an index DAG as a graphviz dot file"""
945 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
945 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
946 ui.write(("digraph G {\n"))
946 ui.write(("digraph G {\n"))
947 for i in r:
947 for i in r:
948 node = r.node(i)
948 node = r.node(i)
949 pp = r.parents(node)
949 pp = r.parents(node)
950 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
950 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
951 if pp[1] != nullid:
951 if pp[1] != nullid:
952 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
952 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
953 ui.write("}\n")
953 ui.write("}\n")
954
954
955 @command('debuginstall', [] + commands.formatteropts, '', norepo=True)
955 @command('debuginstall', [] + commands.formatteropts, '', norepo=True)
956 def debuginstall(ui, **opts):
956 def debuginstall(ui, **opts):
957 '''test Mercurial installation
957 '''test Mercurial installation
958
958
959 Returns 0 on success.
959 Returns 0 on success.
960 '''
960 '''
961
961
962 def writetemp(contents):
962 def writetemp(contents):
963 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
963 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
964 f = os.fdopen(fd, pycompat.sysstr("wb"))
964 f = os.fdopen(fd, pycompat.sysstr("wb"))
965 f.write(contents)
965 f.write(contents)
966 f.close()
966 f.close()
967 return name
967 return name
968
968
969 problems = 0
969 problems = 0
970
970
971 fm = ui.formatter('debuginstall', opts)
971 fm = ui.formatter('debuginstall', opts)
972 fm.startitem()
972 fm.startitem()
973
973
974 # encoding
974 # encoding
975 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
975 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
976 err = None
976 err = None
977 try:
977 try:
978 encoding.fromlocal("test")
978 encoding.fromlocal("test")
979 except error.Abort as inst:
979 except error.Abort as inst:
980 err = inst
980 err = inst
981 problems += 1
981 problems += 1
982 fm.condwrite(err, 'encodingerror', _(" %s\n"
982 fm.condwrite(err, 'encodingerror', _(" %s\n"
983 " (check that your locale is properly set)\n"), err)
983 " (check that your locale is properly set)\n"), err)
984
984
985 # Python
985 # Python
986 fm.write('pythonexe', _("checking Python executable (%s)\n"),
986 fm.write('pythonexe', _("checking Python executable (%s)\n"),
987 pycompat.sysexecutable)
987 pycompat.sysexecutable)
988 fm.write('pythonver', _("checking Python version (%s)\n"),
988 fm.write('pythonver', _("checking Python version (%s)\n"),
989 ("%d.%d.%d" % sys.version_info[:3]))
989 ("%d.%d.%d" % sys.version_info[:3]))
990 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
990 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
991 os.path.dirname(pycompat.fsencode(os.__file__)))
991 os.path.dirname(pycompat.fsencode(os.__file__)))
992
992
993 security = set(sslutil.supportedprotocols)
993 security = set(sslutil.supportedprotocols)
994 if sslutil.hassni:
994 if sslutil.hassni:
995 security.add('sni')
995 security.add('sni')
996
996
997 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
997 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
998 fm.formatlist(sorted(security), name='protocol',
998 fm.formatlist(sorted(security), name='protocol',
999 fmt='%s', sep=','))
999 fmt='%s', sep=','))
1000
1000
1001 # These are warnings, not errors. So don't increment problem count. This
1001 # These are warnings, not errors. So don't increment problem count. This
1002 # may change in the future.
1002 # may change in the future.
1003 if 'tls1.2' not in security:
1003 if 'tls1.2' not in security:
1004 fm.plain(_(' TLS 1.2 not supported by Python install; '
1004 fm.plain(_(' TLS 1.2 not supported by Python install; '
1005 'network connections lack modern security\n'))
1005 'network connections lack modern security\n'))
1006 if 'sni' not in security:
1006 if 'sni' not in security:
1007 fm.plain(_(' SNI not supported by Python install; may have '
1007 fm.plain(_(' SNI not supported by Python install; may have '
1008 'connectivity issues with some servers\n'))
1008 'connectivity issues with some servers\n'))
1009
1009
1010 # TODO print CA cert info
1010 # TODO print CA cert info
1011
1011
1012 # hg version
1012 # hg version
1013 hgver = util.version()
1013 hgver = util.version()
1014 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1014 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1015 hgver.split('+')[0])
1015 hgver.split('+')[0])
1016 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1016 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1017 '+'.join(hgver.split('+')[1:]))
1017 '+'.join(hgver.split('+')[1:]))
1018
1018
1019 # compiled modules
1019 # compiled modules
1020 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1020 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1021 policy.policy)
1021 policy.policy)
1022 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1022 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1023 os.path.dirname(pycompat.fsencode(__file__)))
1023 os.path.dirname(pycompat.fsencode(__file__)))
1024
1024
1025 if policy.policy in ('c', 'allow'):
1025 if policy.policy in ('c', 'allow'):
1026 err = None
1026 err = None
1027 try:
1027 try:
1028 from . import (
1028 from . import (
1029 base85,
1029 base85,
1030 bdiff,
1030 bdiff,
1031 mpatch,
1031 mpatch,
1032 osutil,
1032 osutil,
1033 )
1033 )
1034 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1034 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1035 except Exception as inst:
1035 except Exception as inst:
1036 err = inst
1036 err = inst
1037 problems += 1
1037 problems += 1
1038 fm.condwrite(err, 'extensionserror', " %s\n", err)
1038 fm.condwrite(err, 'extensionserror', " %s\n", err)
1039
1039
1040 compengines = util.compengines._engines.values()
1040 compengines = util.compengines._engines.values()
1041 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1041 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1042 fm.formatlist(sorted(e.name() for e in compengines),
1042 fm.formatlist(sorted(e.name() for e in compengines),
1043 name='compengine', fmt='%s', sep=', '))
1043 name='compengine', fmt='%s', sep=', '))
1044 fm.write('compenginesavail', _('checking available compression engines '
1044 fm.write('compenginesavail', _('checking available compression engines '
1045 '(%s)\n'),
1045 '(%s)\n'),
1046 fm.formatlist(sorted(e.name() for e in compengines
1046 fm.formatlist(sorted(e.name() for e in compengines
1047 if e.available()),
1047 if e.available()),
1048 name='compengine', fmt='%s', sep=', '))
1048 name='compengine', fmt='%s', sep=', '))
1049 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1049 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1050 fm.write('compenginesserver', _('checking available compression engines '
1050 fm.write('compenginesserver', _('checking available compression engines '
1051 'for wire protocol (%s)\n'),
1051 'for wire protocol (%s)\n'),
1052 fm.formatlist([e.name() for e in wirecompengines
1052 fm.formatlist([e.name() for e in wirecompengines
1053 if e.wireprotosupport()],
1053 if e.wireprotosupport()],
1054 name='compengine', fmt='%s', sep=', '))
1054 name='compengine', fmt='%s', sep=', '))
1055
1055
1056 # templates
1056 # templates
1057 p = templater.templatepaths()
1057 p = templater.templatepaths()
1058 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1058 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1059 fm.condwrite(not p, '', _(" no template directories found\n"))
1059 fm.condwrite(not p, '', _(" no template directories found\n"))
1060 if p:
1060 if p:
1061 m = templater.templatepath("map-cmdline.default")
1061 m = templater.templatepath("map-cmdline.default")
1062 if m:
1062 if m:
1063 # template found, check if it is working
1063 # template found, check if it is working
1064 err = None
1064 err = None
1065 try:
1065 try:
1066 templater.templater.frommapfile(m)
1066 templater.templater.frommapfile(m)
1067 except Exception as inst:
1067 except Exception as inst:
1068 err = inst
1068 err = inst
1069 p = None
1069 p = None
1070 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1070 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1071 else:
1071 else:
1072 p = None
1072 p = None
1073 fm.condwrite(p, 'defaulttemplate',
1073 fm.condwrite(p, 'defaulttemplate',
1074 _("checking default template (%s)\n"), m)
1074 _("checking default template (%s)\n"), m)
1075 fm.condwrite(not m, 'defaulttemplatenotfound',
1075 fm.condwrite(not m, 'defaulttemplatenotfound',
1076 _(" template '%s' not found\n"), "default")
1076 _(" template '%s' not found\n"), "default")
1077 if not p:
1077 if not p:
1078 problems += 1
1078 problems += 1
1079 fm.condwrite(not p, '',
1079 fm.condwrite(not p, '',
1080 _(" (templates seem to have been installed incorrectly)\n"))
1080 _(" (templates seem to have been installed incorrectly)\n"))
1081
1081
1082 # editor
1082 # editor
1083 editor = ui.geteditor()
1083 editor = ui.geteditor()
1084 editor = util.expandpath(editor)
1084 editor = util.expandpath(editor)
1085 fm.write('editor', _("checking commit editor... (%s)\n"), editor)
1085 fm.write('editor', _("checking commit editor... (%s)\n"), editor)
1086 cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
1086 cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
1087 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1087 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1088 _(" No commit editor set and can't find %s in PATH\n"
1088 _(" No commit editor set and can't find %s in PATH\n"
1089 " (specify a commit editor in your configuration"
1089 " (specify a commit editor in your configuration"
1090 " file)\n"), not cmdpath and editor == 'vi' and editor)
1090 " file)\n"), not cmdpath and editor == 'vi' and editor)
1091 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1091 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1092 _(" Can't find editor '%s' in PATH\n"
1092 _(" Can't find editor '%s' in PATH\n"
1093 " (specify a commit editor in your configuration"
1093 " (specify a commit editor in your configuration"
1094 " file)\n"), not cmdpath and editor)
1094 " file)\n"), not cmdpath and editor)
1095 if not cmdpath and editor != 'vi':
1095 if not cmdpath and editor != 'vi':
1096 problems += 1
1096 problems += 1
1097
1097
1098 # check username
1098 # check username
1099 username = None
1099 username = None
1100 err = None
1100 err = None
1101 try:
1101 try:
1102 username = ui.username()
1102 username = ui.username()
1103 except error.Abort as e:
1103 except error.Abort as e:
1104 err = e
1104 err = e
1105 problems += 1
1105 problems += 1
1106
1106
1107 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1107 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1108 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1108 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1109 " (specify a username in your configuration file)\n"), err)
1109 " (specify a username in your configuration file)\n"), err)
1110
1110
1111 fm.condwrite(not problems, '',
1111 fm.condwrite(not problems, '',
1112 _("no problems detected\n"))
1112 _("no problems detected\n"))
1113 if not problems:
1113 if not problems:
1114 fm.data(problems=problems)
1114 fm.data(problems=problems)
1115 fm.condwrite(problems, 'problems',
1115 fm.condwrite(problems, 'problems',
1116 _("%d problems detected,"
1116 _("%d problems detected,"
1117 " please check your install!\n"), problems)
1117 " please check your install!\n"), problems)
1118 fm.end()
1118 fm.end()
1119
1119
1120 return problems
1120 return problems
1121
1121
1122 @command('debugknown', [], _('REPO ID...'), norepo=True)
1122 @command('debugknown', [], _('REPO ID...'), norepo=True)
1123 def debugknown(ui, repopath, *ids, **opts):
1123 def debugknown(ui, repopath, *ids, **opts):
1124 """test whether node ids are known to a repo
1124 """test whether node ids are known to a repo
1125
1125
1126 Every ID must be a full-length hex node id string. Returns a list of 0s
1126 Every ID must be a full-length hex node id string. Returns a list of 0s
1127 and 1s indicating unknown/known.
1127 and 1s indicating unknown/known.
1128 """
1128 """
1129 repo = hg.peer(ui, opts, repopath)
1129 repo = hg.peer(ui, opts, repopath)
1130 if not repo.capable('known'):
1130 if not repo.capable('known'):
1131 raise error.Abort("known() not supported by target repository")
1131 raise error.Abort("known() not supported by target repository")
1132 flags = repo.known([bin(s) for s in ids])
1132 flags = repo.known([bin(s) for s in ids])
1133 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1133 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1134
1134
1135 @command('debuglabelcomplete', [], _('LABEL...'))
1135 @command('debuglabelcomplete', [], _('LABEL...'))
1136 def debuglabelcomplete(ui, repo, *args):
1136 def debuglabelcomplete(ui, repo, *args):
1137 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1137 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1138 debugnamecomplete(ui, repo, *args)
1138 debugnamecomplete(ui, repo, *args)
1139
1139
1140 @command('debuglocks',
1140 @command('debuglocks',
1141 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1141 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1142 ('W', 'force-wlock', None,
1142 ('W', 'force-wlock', None,
1143 _('free the working state lock (DANGEROUS)'))],
1143 _('free the working state lock (DANGEROUS)'))],
1144 _('[OPTION]...'))
1144 _('[OPTION]...'))
1145 def debuglocks(ui, repo, **opts):
1145 def debuglocks(ui, repo, **opts):
1146 """show or modify state of locks
1146 """show or modify state of locks
1147
1147
1148 By default, this command will show which locks are held. This
1148 By default, this command will show which locks are held. This
1149 includes the user and process holding the lock, the amount of time
1149 includes the user and process holding the lock, the amount of time
1150 the lock has been held, and the machine name where the process is
1150 the lock has been held, and the machine name where the process is
1151 running if it's not local.
1151 running if it's not local.
1152
1152
1153 Locks protect the integrity of Mercurial's data, so should be
1153 Locks protect the integrity of Mercurial's data, so should be
1154 treated with care. System crashes or other interruptions may cause
1154 treated with care. System crashes or other interruptions may cause
1155 locks to not be properly released, though Mercurial will usually
1155 locks to not be properly released, though Mercurial will usually
1156 detect and remove such stale locks automatically.
1156 detect and remove such stale locks automatically.
1157
1157
1158 However, detecting stale locks may not always be possible (for
1158 However, detecting stale locks may not always be possible (for
1159 instance, on a shared filesystem). Removing locks may also be
1159 instance, on a shared filesystem). Removing locks may also be
1160 blocked by filesystem permissions.
1160 blocked by filesystem permissions.
1161
1161
1162 Returns 0 if no locks are held.
1162 Returns 0 if no locks are held.
1163
1163
1164 """
1164 """
1165
1165
1166 if opts.get('force_lock'):
1166 if opts.get('force_lock'):
1167 repo.svfs.unlink('lock')
1167 repo.svfs.unlink('lock')
1168 if opts.get('force_wlock'):
1168 if opts.get('force_wlock'):
1169 repo.vfs.unlink('wlock')
1169 repo.vfs.unlink('wlock')
1170 if opts.get('force_lock') or opts.get('force_lock'):
1170 if opts.get('force_lock') or opts.get('force_lock'):
1171 return 0
1171 return 0
1172
1172
1173 now = time.time()
1173 now = time.time()
1174 held = 0
1174 held = 0
1175
1175
1176 def report(vfs, name, method):
1176 def report(vfs, name, method):
1177 # this causes stale locks to get reaped for more accurate reporting
1177 # this causes stale locks to get reaped for more accurate reporting
1178 try:
1178 try:
1179 l = method(False)
1179 l = method(False)
1180 except error.LockHeld:
1180 except error.LockHeld:
1181 l = None
1181 l = None
1182
1182
1183 if l:
1183 if l:
1184 l.release()
1184 l.release()
1185 else:
1185 else:
1186 try:
1186 try:
1187 stat = vfs.lstat(name)
1187 stat = vfs.lstat(name)
1188 age = now - stat.st_mtime
1188 age = now - stat.st_mtime
1189 user = util.username(stat.st_uid)
1189 user = util.username(stat.st_uid)
1190 locker = vfs.readlock(name)
1190 locker = vfs.readlock(name)
1191 if ":" in locker:
1191 if ":" in locker:
1192 host, pid = locker.split(':')
1192 host, pid = locker.split(':')
1193 if host == socket.gethostname():
1193 if host == socket.gethostname():
1194 locker = 'user %s, process %s' % (user, pid)
1194 locker = 'user %s, process %s' % (user, pid)
1195 else:
1195 else:
1196 locker = 'user %s, process %s, host %s' \
1196 locker = 'user %s, process %s, host %s' \
1197 % (user, pid, host)
1197 % (user, pid, host)
1198 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1198 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1199 return 1
1199 return 1
1200 except OSError as e:
1200 except OSError as e:
1201 if e.errno != errno.ENOENT:
1201 if e.errno != errno.ENOENT:
1202 raise
1202 raise
1203
1203
1204 ui.write(("%-6s free\n") % (name + ":"))
1204 ui.write(("%-6s free\n") % (name + ":"))
1205 return 0
1205 return 0
1206
1206
1207 held += report(repo.svfs, "lock", repo.lock)
1207 held += report(repo.svfs, "lock", repo.lock)
1208 held += report(repo.vfs, "wlock", repo.wlock)
1208 held += report(repo.vfs, "wlock", repo.wlock)
1209
1209
1210 return held
1210 return held
1211
1211
1212 @command('debugmergestate', [], '')
1212 @command('debugmergestate', [], '')
1213 def debugmergestate(ui, repo, *args):
1213 def debugmergestate(ui, repo, *args):
1214 """print merge state
1214 """print merge state
1215
1215
1216 Use --verbose to print out information about whether v1 or v2 merge state
1216 Use --verbose to print out information about whether v1 or v2 merge state
1217 was chosen."""
1217 was chosen."""
1218 def _hashornull(h):
1218 def _hashornull(h):
1219 if h == nullhex:
1219 if h == nullhex:
1220 return 'null'
1220 return 'null'
1221 else:
1221 else:
1222 return h
1222 return h
1223
1223
1224 def printrecords(version):
1224 def printrecords(version):
1225 ui.write(('* version %s records\n') % version)
1225 ui.write(('* version %s records\n') % version)
1226 if version == 1:
1226 if version == 1:
1227 records = v1records
1227 records = v1records
1228 else:
1228 else:
1229 records = v2records
1229 records = v2records
1230
1230
1231 for rtype, record in records:
1231 for rtype, record in records:
1232 # pretty print some record types
1232 # pretty print some record types
1233 if rtype == 'L':
1233 if rtype == 'L':
1234 ui.write(('local: %s\n') % record)
1234 ui.write(('local: %s\n') % record)
1235 elif rtype == 'O':
1235 elif rtype == 'O':
1236 ui.write(('other: %s\n') % record)
1236 ui.write(('other: %s\n') % record)
1237 elif rtype == 'm':
1237 elif rtype == 'm':
1238 driver, mdstate = record.split('\0', 1)
1238 driver, mdstate = record.split('\0', 1)
1239 ui.write(('merge driver: %s (state "%s")\n')
1239 ui.write(('merge driver: %s (state "%s")\n')
1240 % (driver, mdstate))
1240 % (driver, mdstate))
1241 elif rtype in 'FDC':
1241 elif rtype in 'FDC':
1242 r = record.split('\0')
1242 r = record.split('\0')
1243 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1243 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1244 if version == 1:
1244 if version == 1:
1245 onode = 'not stored in v1 format'
1245 onode = 'not stored in v1 format'
1246 flags = r[7]
1246 flags = r[7]
1247 else:
1247 else:
1248 onode, flags = r[7:9]
1248 onode, flags = r[7:9]
1249 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1249 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1250 % (f, rtype, state, _hashornull(hash)))
1250 % (f, rtype, state, _hashornull(hash)))
1251 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1251 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1252 ui.write((' ancestor path: %s (node %s)\n')
1252 ui.write((' ancestor path: %s (node %s)\n')
1253 % (afile, _hashornull(anode)))
1253 % (afile, _hashornull(anode)))
1254 ui.write((' other path: %s (node %s)\n')
1254 ui.write((' other path: %s (node %s)\n')
1255 % (ofile, _hashornull(onode)))
1255 % (ofile, _hashornull(onode)))
1256 elif rtype == 'f':
1256 elif rtype == 'f':
1257 filename, rawextras = record.split('\0', 1)
1257 filename, rawextras = record.split('\0', 1)
1258 extras = rawextras.split('\0')
1258 extras = rawextras.split('\0')
1259 i = 0
1259 i = 0
1260 extrastrings = []
1260 extrastrings = []
1261 while i < len(extras):
1261 while i < len(extras):
1262 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1262 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1263 i += 2
1263 i += 2
1264
1264
1265 ui.write(('file extras: %s (%s)\n')
1265 ui.write(('file extras: %s (%s)\n')
1266 % (filename, ', '.join(extrastrings)))
1266 % (filename, ', '.join(extrastrings)))
1267 elif rtype == 'l':
1267 elif rtype == 'l':
1268 labels = record.split('\0', 2)
1268 labels = record.split('\0', 2)
1269 labels = [l for l in labels if len(l) > 0]
1269 labels = [l for l in labels if len(l) > 0]
1270 ui.write(('labels:\n'))
1270 ui.write(('labels:\n'))
1271 ui.write((' local: %s\n' % labels[0]))
1271 ui.write((' local: %s\n' % labels[0]))
1272 ui.write((' other: %s\n' % labels[1]))
1272 ui.write((' other: %s\n' % labels[1]))
1273 if len(labels) > 2:
1273 if len(labels) > 2:
1274 ui.write((' base: %s\n' % labels[2]))
1274 ui.write((' base: %s\n' % labels[2]))
1275 else:
1275 else:
1276 ui.write(('unrecognized entry: %s\t%s\n')
1276 ui.write(('unrecognized entry: %s\t%s\n')
1277 % (rtype, record.replace('\0', '\t')))
1277 % (rtype, record.replace('\0', '\t')))
1278
1278
1279 # Avoid mergestate.read() since it may raise an exception for unsupported
1279 # Avoid mergestate.read() since it may raise an exception for unsupported
1280 # merge state records. We shouldn't be doing this, but this is OK since this
1280 # merge state records. We shouldn't be doing this, but this is OK since this
1281 # command is pretty low-level.
1281 # command is pretty low-level.
1282 ms = mergemod.mergestate(repo)
1282 ms = mergemod.mergestate(repo)
1283
1283
1284 # sort so that reasonable information is on top
1284 # sort so that reasonable information is on top
1285 v1records = ms._readrecordsv1()
1285 v1records = ms._readrecordsv1()
1286 v2records = ms._readrecordsv2()
1286 v2records = ms._readrecordsv2()
1287 order = 'LOml'
1287 order = 'LOml'
1288 def key(r):
1288 def key(r):
1289 idx = order.find(r[0])
1289 idx = order.find(r[0])
1290 if idx == -1:
1290 if idx == -1:
1291 return (1, r[1])
1291 return (1, r[1])
1292 else:
1292 else:
1293 return (0, idx)
1293 return (0, idx)
1294 v1records.sort(key=key)
1294 v1records.sort(key=key)
1295 v2records.sort(key=key)
1295 v2records.sort(key=key)
1296
1296
1297 if not v1records and not v2records:
1297 if not v1records and not v2records:
1298 ui.write(('no merge state found\n'))
1298 ui.write(('no merge state found\n'))
1299 elif not v2records:
1299 elif not v2records:
1300 ui.note(('no version 2 merge state\n'))
1300 ui.note(('no version 2 merge state\n'))
1301 printrecords(1)
1301 printrecords(1)
1302 elif ms._v1v2match(v1records, v2records):
1302 elif ms._v1v2match(v1records, v2records):
1303 ui.note(('v1 and v2 states match: using v2\n'))
1303 ui.note(('v1 and v2 states match: using v2\n'))
1304 printrecords(2)
1304 printrecords(2)
1305 else:
1305 else:
1306 ui.note(('v1 and v2 states mismatch: using v1\n'))
1306 ui.note(('v1 and v2 states mismatch: using v1\n'))
1307 printrecords(1)
1307 printrecords(1)
1308 if ui.verbose:
1308 if ui.verbose:
1309 printrecords(2)
1309 printrecords(2)
1310
1310
1311 @command('debugnamecomplete', [], _('NAME...'))
1311 @command('debugnamecomplete', [], _('NAME...'))
1312 def debugnamecomplete(ui, repo, *args):
1312 def debugnamecomplete(ui, repo, *args):
1313 '''complete "names" - tags, open branch names, bookmark names'''
1313 '''complete "names" - tags, open branch names, bookmark names'''
1314
1314
1315 names = set()
1315 names = set()
1316 # since we previously only listed open branches, we will handle that
1316 # since we previously only listed open branches, we will handle that
1317 # specially (after this for loop)
1317 # specially (after this for loop)
1318 for name, ns in repo.names.iteritems():
1318 for name, ns in repo.names.iteritems():
1319 if name != 'branches':
1319 if name != 'branches':
1320 names.update(ns.listnames(repo))
1320 names.update(ns.listnames(repo))
1321 names.update(tag for (tag, heads, tip, closed)
1321 names.update(tag for (tag, heads, tip, closed)
1322 in repo.branchmap().iterbranches() if not closed)
1322 in repo.branchmap().iterbranches() if not closed)
1323 completions = set()
1323 completions = set()
1324 if not args:
1324 if not args:
1325 args = ['']
1325 args = ['']
1326 for a in args:
1326 for a in args:
1327 completions.update(n for n in names if n.startswith(a))
1327 completions.update(n for n in names if n.startswith(a))
1328 ui.write('\n'.join(sorted(completions)))
1328 ui.write('\n'.join(sorted(completions)))
1329 ui.write('\n')
1329 ui.write('\n')
1330
1330
1331 @command('debugobsolete',
1331 @command('debugobsolete',
1332 [('', 'flags', 0, _('markers flag')),
1332 [('', 'flags', 0, _('markers flag')),
1333 ('', 'record-parents', False,
1333 ('', 'record-parents', False,
1334 _('record parent information for the precursor')),
1334 _('record parent information for the precursor')),
1335 ('r', 'rev', [], _('display markers relevant to REV')),
1335 ('r', 'rev', [], _('display markers relevant to REV')),
1336 ('', 'index', False, _('display index of the marker')),
1336 ('', 'index', False, _('display index of the marker')),
1337 ('', 'delete', [], _('delete markers specified by indices')),
1337 ('', 'delete', [], _('delete markers specified by indices')),
1338 ] + commands.commitopts2 + commands.formatteropts,
1338 ] + commands.commitopts2 + commands.formatteropts,
1339 _('[OBSOLETED [REPLACEMENT ...]]'))
1339 _('[OBSOLETED [REPLACEMENT ...]]'))
1340 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1340 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1341 """create arbitrary obsolete marker
1341 """create arbitrary obsolete marker
1342
1342
1343 With no arguments, displays the list of obsolescence markers."""
1343 With no arguments, displays the list of obsolescence markers."""
1344
1344
1345 def parsenodeid(s):
1345 def parsenodeid(s):
1346 try:
1346 try:
1347 # We do not use revsingle/revrange functions here to accept
1347 # We do not use revsingle/revrange functions here to accept
1348 # arbitrary node identifiers, possibly not present in the
1348 # arbitrary node identifiers, possibly not present in the
1349 # local repository.
1349 # local repository.
1350 n = bin(s)
1350 n = bin(s)
1351 if len(n) != len(nullid):
1351 if len(n) != len(nullid):
1352 raise TypeError()
1352 raise TypeError()
1353 return n
1353 return n
1354 except TypeError:
1354 except TypeError:
1355 raise error.Abort('changeset references must be full hexadecimal '
1355 raise error.Abort('changeset references must be full hexadecimal '
1356 'node identifiers')
1356 'node identifiers')
1357
1357
1358 if opts.get('delete'):
1358 if opts.get('delete'):
1359 indices = []
1359 indices = []
1360 for v in opts.get('delete'):
1360 for v in opts.get('delete'):
1361 try:
1361 try:
1362 indices.append(int(v))
1362 indices.append(int(v))
1363 except ValueError:
1363 except ValueError:
1364 raise error.Abort(_('invalid index value: %r') % v,
1364 raise error.Abort(_('invalid index value: %r') % v,
1365 hint=_('use integers for indices'))
1365 hint=_('use integers for indices'))
1366
1366
1367 if repo.currenttransaction():
1367 if repo.currenttransaction():
1368 raise error.Abort(_('cannot delete obsmarkers in the middle '
1368 raise error.Abort(_('cannot delete obsmarkers in the middle '
1369 'of transaction.'))
1369 'of transaction.'))
1370
1370
1371 with repo.lock():
1371 with repo.lock():
1372 n = repair.deleteobsmarkers(repo.obsstore, indices)
1372 n = repair.deleteobsmarkers(repo.obsstore, indices)
1373 ui.write(_('deleted %i obsolescence markers\n') % n)
1373 ui.write(_('deleted %i obsolescence markers\n') % n)
1374
1374
1375 return
1375 return
1376
1376
1377 if precursor is not None:
1377 if precursor is not None:
1378 if opts['rev']:
1378 if opts['rev']:
1379 raise error.Abort('cannot select revision when creating marker')
1379 raise error.Abort('cannot select revision when creating marker')
1380 metadata = {}
1380 metadata = {}
1381 metadata['user'] = opts['user'] or ui.username()
1381 metadata['user'] = opts['user'] or ui.username()
1382 succs = tuple(parsenodeid(succ) for succ in successors)
1382 succs = tuple(parsenodeid(succ) for succ in successors)
1383 l = repo.lock()
1383 l = repo.lock()
1384 try:
1384 try:
1385 tr = repo.transaction('debugobsolete')
1385 tr = repo.transaction('debugobsolete')
1386 try:
1386 try:
1387 date = opts.get('date')
1387 date = opts.get('date')
1388 if date:
1388 if date:
1389 date = util.parsedate(date)
1389 date = util.parsedate(date)
1390 else:
1390 else:
1391 date = None
1391 date = None
1392 prec = parsenodeid(precursor)
1392 prec = parsenodeid(precursor)
1393 parents = None
1393 parents = None
1394 if opts['record_parents']:
1394 if opts['record_parents']:
1395 if prec not in repo.unfiltered():
1395 if prec not in repo.unfiltered():
1396 raise error.Abort('cannot used --record-parents on '
1396 raise error.Abort('cannot used --record-parents on '
1397 'unknown changesets')
1397 'unknown changesets')
1398 parents = repo.unfiltered()[prec].parents()
1398 parents = repo.unfiltered()[prec].parents()
1399 parents = tuple(p.node() for p in parents)
1399 parents = tuple(p.node() for p in parents)
1400 repo.obsstore.create(tr, prec, succs, opts['flags'],
1400 repo.obsstore.create(tr, prec, succs, opts['flags'],
1401 parents=parents, date=date,
1401 parents=parents, date=date,
1402 metadata=metadata)
1402 metadata=metadata)
1403 tr.close()
1403 tr.close()
1404 except ValueError as exc:
1404 except ValueError as exc:
1405 raise error.Abort(_('bad obsmarker input: %s') % exc)
1405 raise error.Abort(_('bad obsmarker input: %s') % exc)
1406 finally:
1406 finally:
1407 tr.release()
1407 tr.release()
1408 finally:
1408 finally:
1409 l.release()
1409 l.release()
1410 else:
1410 else:
1411 if opts['rev']:
1411 if opts['rev']:
1412 revs = scmutil.revrange(repo, opts['rev'])
1412 revs = scmutil.revrange(repo, opts['rev'])
1413 nodes = [repo[r].node() for r in revs]
1413 nodes = [repo[r].node() for r in revs]
1414 markers = list(obsolete.getmarkers(repo, nodes=nodes))
1414 markers = list(obsolete.getmarkers(repo, nodes=nodes))
1415 markers.sort(key=lambda x: x._data)
1415 markers.sort(key=lambda x: x._data)
1416 else:
1416 else:
1417 markers = obsolete.getmarkers(repo)
1417 markers = obsolete.getmarkers(repo)
1418
1418
1419 markerstoiter = markers
1419 markerstoiter = markers
1420 isrelevant = lambda m: True
1420 isrelevant = lambda m: True
1421 if opts.get('rev') and opts.get('index'):
1421 if opts.get('rev') and opts.get('index'):
1422 markerstoiter = obsolete.getmarkers(repo)
1422 markerstoiter = obsolete.getmarkers(repo)
1423 markerset = set(markers)
1423 markerset = set(markers)
1424 isrelevant = lambda m: m in markerset
1424 isrelevant = lambda m: m in markerset
1425
1425
1426 fm = ui.formatter('debugobsolete', opts)
1426 fm = ui.formatter('debugobsolete', opts)
1427 for i, m in enumerate(markerstoiter):
1427 for i, m in enumerate(markerstoiter):
1428 if not isrelevant(m):
1428 if not isrelevant(m):
1429 # marker can be irrelevant when we're iterating over a set
1429 # marker can be irrelevant when we're iterating over a set
1430 # of markers (markerstoiter) which is bigger than the set
1430 # of markers (markerstoiter) which is bigger than the set
1431 # of markers we want to display (markers)
1431 # of markers we want to display (markers)
1432 # this can happen if both --index and --rev options are
1432 # this can happen if both --index and --rev options are
1433 # provided and thus we need to iterate over all of the markers
1433 # provided and thus we need to iterate over all of the markers
1434 # to get the correct indices, but only display the ones that
1434 # to get the correct indices, but only display the ones that
1435 # are relevant to --rev value
1435 # are relevant to --rev value
1436 continue
1436 continue
1437 fm.startitem()
1437 fm.startitem()
1438 ind = i if opts.get('index') else None
1438 ind = i if opts.get('index') else None
1439 cmdutil.showmarker(fm, m, index=ind)
1439 cmdutil.showmarker(fm, m, index=ind)
1440 fm.end()
1440 fm.end()
1441
1441
1442 @command('debugpathcomplete',
1442 @command('debugpathcomplete',
1443 [('f', 'full', None, _('complete an entire path')),
1443 [('f', 'full', None, _('complete an entire path')),
1444 ('n', 'normal', None, _('show only normal files')),
1444 ('n', 'normal', None, _('show only normal files')),
1445 ('a', 'added', None, _('show only added files')),
1445 ('a', 'added', None, _('show only added files')),
1446 ('r', 'removed', None, _('show only removed files'))],
1446 ('r', 'removed', None, _('show only removed files'))],
1447 _('FILESPEC...'))
1447 _('FILESPEC...'))
1448 def debugpathcomplete(ui, repo, *specs, **opts):
1448 def debugpathcomplete(ui, repo, *specs, **opts):
1449 '''complete part or all of a tracked path
1449 '''complete part or all of a tracked path
1450
1450
1451 This command supports shells that offer path name completion. It
1451 This command supports shells that offer path name completion. It
1452 currently completes only files already known to the dirstate.
1452 currently completes only files already known to the dirstate.
1453
1453
1454 Completion extends only to the next path segment unless
1454 Completion extends only to the next path segment unless
1455 --full is specified, in which case entire paths are used.'''
1455 --full is specified, in which case entire paths are used.'''
1456
1456
1457 def complete(path, acceptable):
1457 def complete(path, acceptable):
1458 dirstate = repo.dirstate
1458 dirstate = repo.dirstate
1459 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1459 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1460 rootdir = repo.root + pycompat.ossep
1460 rootdir = repo.root + pycompat.ossep
1461 if spec != repo.root and not spec.startswith(rootdir):
1461 if spec != repo.root and not spec.startswith(rootdir):
1462 return [], []
1462 return [], []
1463 if os.path.isdir(spec):
1463 if os.path.isdir(spec):
1464 spec += '/'
1464 spec += '/'
1465 spec = spec[len(rootdir):]
1465 spec = spec[len(rootdir):]
1466 fixpaths = pycompat.ossep != '/'
1466 fixpaths = pycompat.ossep != '/'
1467 if fixpaths:
1467 if fixpaths:
1468 spec = spec.replace(pycompat.ossep, '/')
1468 spec = spec.replace(pycompat.ossep, '/')
1469 speclen = len(spec)
1469 speclen = len(spec)
1470 fullpaths = opts['full']
1470 fullpaths = opts['full']
1471 files, dirs = set(), set()
1471 files, dirs = set(), set()
1472 adddir, addfile = dirs.add, files.add
1472 adddir, addfile = dirs.add, files.add
1473 for f, st in dirstate.iteritems():
1473 for f, st in dirstate.iteritems():
1474 if f.startswith(spec) and st[0] in acceptable:
1474 if f.startswith(spec) and st[0] in acceptable:
1475 if fixpaths:
1475 if fixpaths:
1476 f = f.replace('/', pycompat.ossep)
1476 f = f.replace('/', pycompat.ossep)
1477 if fullpaths:
1477 if fullpaths:
1478 addfile(f)
1478 addfile(f)
1479 continue
1479 continue
1480 s = f.find(pycompat.ossep, speclen)
1480 s = f.find(pycompat.ossep, speclen)
1481 if s >= 0:
1481 if s >= 0:
1482 adddir(f[:s])
1482 adddir(f[:s])
1483 else:
1483 else:
1484 addfile(f)
1484 addfile(f)
1485 return files, dirs
1485 return files, dirs
1486
1486
1487 acceptable = ''
1487 acceptable = ''
1488 if opts['normal']:
1488 if opts['normal']:
1489 acceptable += 'nm'
1489 acceptable += 'nm'
1490 if opts['added']:
1490 if opts['added']:
1491 acceptable += 'a'
1491 acceptable += 'a'
1492 if opts['removed']:
1492 if opts['removed']:
1493 acceptable += 'r'
1493 acceptable += 'r'
1494 cwd = repo.getcwd()
1494 cwd = repo.getcwd()
1495 if not specs:
1495 if not specs:
1496 specs = ['.']
1496 specs = ['.']
1497
1497
1498 files, dirs = set(), set()
1498 files, dirs = set(), set()
1499 for spec in specs:
1499 for spec in specs:
1500 f, d = complete(spec, acceptable or 'nmar')
1500 f, d = complete(spec, acceptable or 'nmar')
1501 files.update(f)
1501 files.update(f)
1502 dirs.update(d)
1502 dirs.update(d)
1503 files.update(dirs)
1503 files.update(dirs)
1504 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1504 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1505 ui.write('\n')
1505 ui.write('\n')
1506
1506
1507 @command('debugpickmergetool',
1507 @command('debugpickmergetool',
1508 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1508 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1509 ('', 'changedelete', None, _('emulate merging change and delete')),
1509 ('', 'changedelete', None, _('emulate merging change and delete')),
1510 ] + commands.walkopts + commands.mergetoolopts,
1510 ] + commands.walkopts + commands.mergetoolopts,
1511 _('[PATTERN]...'),
1511 _('[PATTERN]...'),
1512 inferrepo=True)
1512 inferrepo=True)
1513 def debugpickmergetool(ui, repo, *pats, **opts):
1513 def debugpickmergetool(ui, repo, *pats, **opts):
1514 """examine which merge tool is chosen for specified file
1514 """examine which merge tool is chosen for specified file
1515
1515
1516 As described in :hg:`help merge-tools`, Mercurial examines
1516 As described in :hg:`help merge-tools`, Mercurial examines
1517 configurations below in this order to decide which merge tool is
1517 configurations below in this order to decide which merge tool is
1518 chosen for specified file.
1518 chosen for specified file.
1519
1519
1520 1. ``--tool`` option
1520 1. ``--tool`` option
1521 2. ``HGMERGE`` environment variable
1521 2. ``HGMERGE`` environment variable
1522 3. configurations in ``merge-patterns`` section
1522 3. configurations in ``merge-patterns`` section
1523 4. configuration of ``ui.merge``
1523 4. configuration of ``ui.merge``
1524 5. configurations in ``merge-tools`` section
1524 5. configurations in ``merge-tools`` section
1525 6. ``hgmerge`` tool (for historical reason only)
1525 6. ``hgmerge`` tool (for historical reason only)
1526 7. default tool for fallback (``:merge`` or ``:prompt``)
1526 7. default tool for fallback (``:merge`` or ``:prompt``)
1527
1527
1528 This command writes out examination result in the style below::
1528 This command writes out examination result in the style below::
1529
1529
1530 FILE = MERGETOOL
1530 FILE = MERGETOOL
1531
1531
1532 By default, all files known in the first parent context of the
1532 By default, all files known in the first parent context of the
1533 working directory are examined. Use file patterns and/or -I/-X
1533 working directory are examined. Use file patterns and/or -I/-X
1534 options to limit target files. -r/--rev is also useful to examine
1534 options to limit target files. -r/--rev is also useful to examine
1535 files in another context without actual updating to it.
1535 files in another context without actual updating to it.
1536
1536
1537 With --debug, this command shows warning messages while matching
1537 With --debug, this command shows warning messages while matching
1538 against ``merge-patterns`` and so on, too. It is recommended to
1538 against ``merge-patterns`` and so on, too. It is recommended to
1539 use this option with explicit file patterns and/or -I/-X options,
1539 use this option with explicit file patterns and/or -I/-X options,
1540 because this option increases amount of output per file according
1540 because this option increases amount of output per file according
1541 to configurations in hgrc.
1541 to configurations in hgrc.
1542
1542
1543 With -v/--verbose, this command shows configurations below at
1543 With -v/--verbose, this command shows configurations below at
1544 first (only if specified).
1544 first (only if specified).
1545
1545
1546 - ``--tool`` option
1546 - ``--tool`` option
1547 - ``HGMERGE`` environment variable
1547 - ``HGMERGE`` environment variable
1548 - configuration of ``ui.merge``
1548 - configuration of ``ui.merge``
1549
1549
1550 If merge tool is chosen before matching against
1550 If merge tool is chosen before matching against
1551 ``merge-patterns``, this command can't show any helpful
1551 ``merge-patterns``, this command can't show any helpful
1552 information, even with --debug. In such case, information above is
1552 information, even with --debug. In such case, information above is
1553 useful to know why a merge tool is chosen.
1553 useful to know why a merge tool is chosen.
1554 """
1554 """
1555 overrides = {}
1555 overrides = {}
1556 if opts['tool']:
1556 if opts['tool']:
1557 overrides[('ui', 'forcemerge')] = opts['tool']
1557 overrides[('ui', 'forcemerge')] = opts['tool']
1558 ui.note(('with --tool %r\n') % (opts['tool']))
1558 ui.note(('with --tool %r\n') % (opts['tool']))
1559
1559
1560 with ui.configoverride(overrides, 'debugmergepatterns'):
1560 with ui.configoverride(overrides, 'debugmergepatterns'):
1561 hgmerge = encoding.environ.get("HGMERGE")
1561 hgmerge = encoding.environ.get("HGMERGE")
1562 if hgmerge is not None:
1562 if hgmerge is not None:
1563 ui.note(('with HGMERGE=%r\n') % (hgmerge))
1563 ui.note(('with HGMERGE=%r\n') % (hgmerge))
1564 uimerge = ui.config("ui", "merge")
1564 uimerge = ui.config("ui", "merge")
1565 if uimerge:
1565 if uimerge:
1566 ui.note(('with ui.merge=%r\n') % (uimerge))
1566 ui.note(('with ui.merge=%r\n') % (uimerge))
1567
1567
1568 ctx = scmutil.revsingle(repo, opts.get('rev'))
1568 ctx = scmutil.revsingle(repo, opts.get('rev'))
1569 m = scmutil.match(ctx, pats, opts)
1569 m = scmutil.match(ctx, pats, opts)
1570 changedelete = opts['changedelete']
1570 changedelete = opts['changedelete']
1571 for path in ctx.walk(m):
1571 for path in ctx.walk(m):
1572 fctx = ctx[path]
1572 fctx = ctx[path]
1573 try:
1573 try:
1574 if not ui.debugflag:
1574 if not ui.debugflag:
1575 ui.pushbuffer(error=True)
1575 ui.pushbuffer(error=True)
1576 tool, toolpath = filemerge._picktool(repo, ui, path,
1576 tool, toolpath = filemerge._picktool(repo, ui, path,
1577 fctx.isbinary(),
1577 fctx.isbinary(),
1578 'l' in fctx.flags(),
1578 'l' in fctx.flags(),
1579 changedelete)
1579 changedelete)
1580 finally:
1580 finally:
1581 if not ui.debugflag:
1581 if not ui.debugflag:
1582 ui.popbuffer()
1582 ui.popbuffer()
1583 ui.write(('%s = %s\n') % (path, tool))
1583 ui.write(('%s = %s\n') % (path, tool))
1584
1584
1585 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1585 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1586 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1586 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1587 '''access the pushkey key/value protocol
1587 '''access the pushkey key/value protocol
1588
1588
1589 With two args, list the keys in the given namespace.
1589 With two args, list the keys in the given namespace.
1590
1590
1591 With five args, set a key to new if it currently is set to old.
1591 With five args, set a key to new if it currently is set to old.
1592 Reports success or failure.
1592 Reports success or failure.
1593 '''
1593 '''
1594
1594
1595 target = hg.peer(ui, {}, repopath)
1595 target = hg.peer(ui, {}, repopath)
1596 if keyinfo:
1596 if keyinfo:
1597 key, old, new = keyinfo
1597 key, old, new = keyinfo
1598 r = target.pushkey(namespace, key, old, new)
1598 r = target.pushkey(namespace, key, old, new)
1599 ui.status(str(r) + '\n')
1599 ui.status(str(r) + '\n')
1600 return not r
1600 return not r
1601 else:
1601 else:
1602 for k, v in sorted(target.listkeys(namespace).iteritems()):
1602 for k, v in sorted(target.listkeys(namespace).iteritems()):
1603 ui.write("%s\t%s\n" % (util.escapestr(k),
1603 ui.write("%s\t%s\n" % (util.escapestr(k),
1604 util.escapestr(v)))
1604 util.escapestr(v)))
1605
1605
1606 @command('debugpvec', [], _('A B'))
1606 @command('debugpvec', [], _('A B'))
1607 def debugpvec(ui, repo, a, b=None):
1607 def debugpvec(ui, repo, a, b=None):
1608 ca = scmutil.revsingle(repo, a)
1608 ca = scmutil.revsingle(repo, a)
1609 cb = scmutil.revsingle(repo, b)
1609 cb = scmutil.revsingle(repo, b)
1610 pa = pvec.ctxpvec(ca)
1610 pa = pvec.ctxpvec(ca)
1611 pb = pvec.ctxpvec(cb)
1611 pb = pvec.ctxpvec(cb)
1612 if pa == pb:
1612 if pa == pb:
1613 rel = "="
1613 rel = "="
1614 elif pa > pb:
1614 elif pa > pb:
1615 rel = ">"
1615 rel = ">"
1616 elif pa < pb:
1616 elif pa < pb:
1617 rel = "<"
1617 rel = "<"
1618 elif pa | pb:
1618 elif pa | pb:
1619 rel = "|"
1619 rel = "|"
1620 ui.write(_("a: %s\n") % pa)
1620 ui.write(_("a: %s\n") % pa)
1621 ui.write(_("b: %s\n") % pb)
1621 ui.write(_("b: %s\n") % pb)
1622 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1622 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1623 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1623 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1624 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1624 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1625 pa.distance(pb), rel))
1625 pa.distance(pb), rel))
1626
1626
1627 @command('debugrebuilddirstate|debugrebuildstate',
1627 @command('debugrebuilddirstate|debugrebuildstate',
1628 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1628 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1629 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1629 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1630 'the working copy parent')),
1630 'the working copy parent')),
1631 ],
1631 ],
1632 _('[-r REV]'))
1632 _('[-r REV]'))
1633 def debugrebuilddirstate(ui, repo, rev, **opts):
1633 def debugrebuilddirstate(ui, repo, rev, **opts):
1634 """rebuild the dirstate as it would look like for the given revision
1634 """rebuild the dirstate as it would look like for the given revision
1635
1635
1636 If no revision is specified the first current parent will be used.
1636 If no revision is specified the first current parent will be used.
1637
1637
1638 The dirstate will be set to the files of the given revision.
1638 The dirstate will be set to the files of the given revision.
1639 The actual working directory content or existing dirstate
1639 The actual working directory content or existing dirstate
1640 information such as adds or removes is not considered.
1640 information such as adds or removes is not considered.
1641
1641
1642 ``minimal`` will only rebuild the dirstate status for files that claim to be
1642 ``minimal`` will only rebuild the dirstate status for files that claim to be
1643 tracked but are not in the parent manifest, or that exist in the parent
1643 tracked but are not in the parent manifest, or that exist in the parent
1644 manifest but are not in the dirstate. It will not change adds, removes, or
1644 manifest but are not in the dirstate. It will not change adds, removes, or
1645 modified files that are in the working copy parent.
1645 modified files that are in the working copy parent.
1646
1646
1647 One use of this command is to make the next :hg:`status` invocation
1647 One use of this command is to make the next :hg:`status` invocation
1648 check the actual file content.
1648 check the actual file content.
1649 """
1649 """
1650 ctx = scmutil.revsingle(repo, rev)
1650 ctx = scmutil.revsingle(repo, rev)
1651 with repo.wlock():
1651 with repo.wlock():
1652 dirstate = repo.dirstate
1652 dirstate = repo.dirstate
1653 changedfiles = None
1653 changedfiles = None
1654 # See command doc for what minimal does.
1654 # See command doc for what minimal does.
1655 if opts.get('minimal'):
1655 if opts.get('minimal'):
1656 manifestfiles = set(ctx.manifest().keys())
1656 manifestfiles = set(ctx.manifest().keys())
1657 dirstatefiles = set(dirstate)
1657 dirstatefiles = set(dirstate)
1658 manifestonly = manifestfiles - dirstatefiles
1658 manifestonly = manifestfiles - dirstatefiles
1659 dsonly = dirstatefiles - manifestfiles
1659 dsonly = dirstatefiles - manifestfiles
1660 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1660 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1661 changedfiles = manifestonly | dsnotadded
1661 changedfiles = manifestonly | dsnotadded
1662
1662
1663 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1663 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1664
1664
1665 @command('debugrebuildfncache', [], '')
1665 @command('debugrebuildfncache', [], '')
1666 def debugrebuildfncache(ui, repo):
1666 def debugrebuildfncache(ui, repo):
1667 """rebuild the fncache file"""
1667 """rebuild the fncache file"""
1668 repair.rebuildfncache(ui, repo)
1668 repair.rebuildfncache(ui, repo)
1669
1669
1670 @command('debugrename',
1670 @command('debugrename',
1671 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1671 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1672 _('[-r REV] FILE'))
1672 _('[-r REV] FILE'))
1673 def debugrename(ui, repo, file1, *pats, **opts):
1673 def debugrename(ui, repo, file1, *pats, **opts):
1674 """dump rename information"""
1674 """dump rename information"""
1675
1675
1676 ctx = scmutil.revsingle(repo, opts.get('rev'))
1676 ctx = scmutil.revsingle(repo, opts.get('rev'))
1677 m = scmutil.match(ctx, (file1,) + pats, opts)
1677 m = scmutil.match(ctx, (file1,) + pats, opts)
1678 for abs in ctx.walk(m):
1678 for abs in ctx.walk(m):
1679 fctx = ctx[abs]
1679 fctx = ctx[abs]
1680 o = fctx.filelog().renamed(fctx.filenode())
1680 o = fctx.filelog().renamed(fctx.filenode())
1681 rel = m.rel(abs)
1681 rel = m.rel(abs)
1682 if o:
1682 if o:
1683 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1683 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1684 else:
1684 else:
1685 ui.write(_("%s not renamed\n") % rel)
1685 ui.write(_("%s not renamed\n") % rel)
1686
1686
1687 @command('debugrevlog', commands.debugrevlogopts +
1687 @command('debugrevlog', commands.debugrevlogopts +
1688 [('d', 'dump', False, _('dump index data'))],
1688 [('d', 'dump', False, _('dump index data'))],
1689 _('-c|-m|FILE'),
1689 _('-c|-m|FILE'),
1690 optionalrepo=True)
1690 optionalrepo=True)
1691 def debugrevlog(ui, repo, file_=None, **opts):
1691 def debugrevlog(ui, repo, file_=None, **opts):
1692 """show data and statistics about a revlog"""
1692 """show data and statistics about a revlog"""
1693 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1693 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1694
1694
1695 if opts.get("dump"):
1695 if opts.get("dump"):
1696 numrevs = len(r)
1696 numrevs = len(r)
1697 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1697 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1698 " rawsize totalsize compression heads chainlen\n"))
1698 " rawsize totalsize compression heads chainlen\n"))
1699 ts = 0
1699 ts = 0
1700 heads = set()
1700 heads = set()
1701
1701
1702 for rev in xrange(numrevs):
1702 for rev in xrange(numrevs):
1703 dbase = r.deltaparent(rev)
1703 dbase = r.deltaparent(rev)
1704 if dbase == -1:
1704 if dbase == -1:
1705 dbase = rev
1705 dbase = rev
1706 cbase = r.chainbase(rev)
1706 cbase = r.chainbase(rev)
1707 clen = r.chainlen(rev)
1707 clen = r.chainlen(rev)
1708 p1, p2 = r.parentrevs(rev)
1708 p1, p2 = r.parentrevs(rev)
1709 rs = r.rawsize(rev)
1709 rs = r.rawsize(rev)
1710 ts = ts + rs
1710 ts = ts + rs
1711 heads -= set(r.parentrevs(rev))
1711 heads -= set(r.parentrevs(rev))
1712 heads.add(rev)
1712 heads.add(rev)
1713 try:
1713 try:
1714 compression = ts / r.end(rev)
1714 compression = ts / r.end(rev)
1715 except ZeroDivisionError:
1715 except ZeroDivisionError:
1716 compression = 0
1716 compression = 0
1717 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1717 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1718 "%11d %5d %8d\n" %
1718 "%11d %5d %8d\n" %
1719 (rev, p1, p2, r.start(rev), r.end(rev),
1719 (rev, p1, p2, r.start(rev), r.end(rev),
1720 r.start(dbase), r.start(cbase),
1720 r.start(dbase), r.start(cbase),
1721 r.start(p1), r.start(p2),
1721 r.start(p1), r.start(p2),
1722 rs, ts, compression, len(heads), clen))
1722 rs, ts, compression, len(heads), clen))
1723 return 0
1723 return 0
1724
1724
1725 v = r.version
1725 v = r.version
1726 format = v & 0xFFFF
1726 format = v & 0xFFFF
1727 flags = []
1727 flags = []
1728 gdelta = False
1728 gdelta = False
1729 if v & revlog.FLAG_INLINE_DATA:
1729 if v & revlog.FLAG_INLINE_DATA:
1730 flags.append('inline')
1730 flags.append('inline')
1731 if v & revlog.FLAG_GENERALDELTA:
1731 if v & revlog.FLAG_GENERALDELTA:
1732 gdelta = True
1732 gdelta = True
1733 flags.append('generaldelta')
1733 flags.append('generaldelta')
1734 if not flags:
1734 if not flags:
1735 flags = ['(none)']
1735 flags = ['(none)']
1736
1736
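# Illustration (not part of the original source): the version word read
# above packs the on-disk revlog format in its low 16 bits and feature
# flags in the bits above them; the constants shown mirror revlog.py for
# this era and are given only as an example.
#
#   v = 0x00030001        # format 1 with inline data and generaldelta
#   v & 0xFFFF   == 1     # REVLOGV1
#   v & (1 << 16) != 0    # FLAG_INLINE_DATA
#   v & (1 << 17) != 0    # FLAG_GENERALDELTA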
1737 nummerges = 0
1737 nummerges = 0
1738 numfull = 0
1738 numfull = 0
1739 numprev = 0
1739 numprev = 0
1740 nump1 = 0
1740 nump1 = 0
1741 nump2 = 0
1741 nump2 = 0
1742 numother = 0
1742 numother = 0
1743 nump1prev = 0
1743 nump1prev = 0
1744 nump2prev = 0
1744 nump2prev = 0
1745 chainlengths = []
1745 chainlengths = []
1746
1746
1747 datasize = [None, 0, 0]
1747 datasize = [None, 0, 0]
1748 fullsize = [None, 0, 0]
1748 fullsize = [None, 0, 0]
1749 deltasize = [None, 0, 0]
1749 deltasize = [None, 0, 0]
1750 chunktypecounts = {}
1750 chunktypecounts = {}
1751 chunktypesizes = {}
1751 chunktypesizes = {}
1752
1752
1753 def addsize(size, l):
1753 def addsize(size, l):
1754 if l[0] is None or size < l[0]:
1754 if l[0] is None or size < l[0]:
1755 l[0] = size
1755 l[0] = size
1756 if size > l[1]:
1756 if size > l[1]:
1757 l[1] = size
1757 l[1] = size
1758 l[2] += size
1758 l[2] += size
1759
1759
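# Illustration (not part of the original source): addsize maintains a
# [min, max, total] triple in place, so feeding a fresh triple the sizes
# 30, 10 and 20 leaves [10, 30, 60]; the total slot is divided by a
# revision count further down to report the averages.
#
#   sizes = [None, 0, 0]
#   for s in (30, 10, 20):
#       addsize(s, sizes)
#   # sizes == [10, 30, 60]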
1760 numrevs = len(r)
1760 numrevs = len(r)
1761 for rev in xrange(numrevs):
1761 for rev in xrange(numrevs):
1762 p1, p2 = r.parentrevs(rev)
1762 p1, p2 = r.parentrevs(rev)
1763 delta = r.deltaparent(rev)
1763 delta = r.deltaparent(rev)
1764 if format > 0:
1764 if format > 0:
1765 addsize(r.rawsize(rev), datasize)
1765 addsize(r.rawsize(rev), datasize)
1766 if p2 != nullrev:
1766 if p2 != nullrev:
1767 nummerges += 1
1767 nummerges += 1
1768 size = r.length(rev)
1768 size = r.length(rev)
1769 if delta == nullrev:
1769 if delta == nullrev:
1770 chainlengths.append(0)
1770 chainlengths.append(0)
1771 numfull += 1
1771 numfull += 1
1772 addsize(size, fullsize)
1772 addsize(size, fullsize)
1773 else:
1773 else:
1774 chainlengths.append(chainlengths[delta] + 1)
1774 chainlengths.append(chainlengths[delta] + 1)
1775 addsize(size, deltasize)
1775 addsize(size, deltasize)
1776 if delta == rev - 1:
1776 if delta == rev - 1:
1777 numprev += 1
1777 numprev += 1
1778 if delta == p1:
1778 if delta == p1:
1779 nump1prev += 1
1779 nump1prev += 1
1780 elif delta == p2:
1780 elif delta == p2:
1781 nump2prev += 1
1781 nump2prev += 1
1782 elif delta == p1:
1782 elif delta == p1:
1783 nump1 += 1
1783 nump1 += 1
1784 elif delta == p2:
1784 elif delta == p2:
1785 nump2 += 1
1785 nump2 += 1
1786 elif delta != nullrev:
1786 elif delta != nullrev:
1787 numother += 1
1787 numother += 1
1788
1788
1789 # Obtain data on the raw chunks in the revlog.
1789 # Obtain data on the raw chunks in the revlog.
1790 segment = r._getsegmentforrevs(rev, rev)[1]
1790 segment = r._getsegmentforrevs(rev, rev)[1]
1791 if segment:
1791 if segment:
1792 chunktype = segment[0]
1792 chunktype = segment[0]
1793 else:
1793 else:
1794 chunktype = 'empty'
1794 chunktype = 'empty'
1795
1795
1796 if chunktype not in chunktypecounts:
1796 if chunktype not in chunktypecounts:
1797 chunktypecounts[chunktype] = 0
1797 chunktypecounts[chunktype] = 0
1798 chunktypesizes[chunktype] = 0
1798 chunktypesizes[chunktype] = 0
1799
1799
1800 chunktypecounts[chunktype] += 1
1800 chunktypecounts[chunktype] += 1
1801 chunktypesizes[chunktype] += size
1801 chunktypesizes[chunktype] += size
1802
1802
1803 # Adjust size min value for empty cases
1803 # Adjust size min value for empty cases
1804 for size in (datasize, fullsize, deltasize):
1804 for size in (datasize, fullsize, deltasize):
1805 if size[0] is None:
1805 if size[0] is None:
1806 size[0] = 0
1806 size[0] = 0
1807
1807
1808 numdeltas = numrevs - numfull
1808 numdeltas = numrevs - numfull
1809 numoprev = numprev - nump1prev - nump2prev
1809 numoprev = numprev - nump1prev - nump2prev
1810 totalrawsize = datasize[2]
1810 totalrawsize = datasize[2]
1811 datasize[2] /= numrevs
1811 datasize[2] /= numrevs
1812 fulltotal = fullsize[2]
1812 fulltotal = fullsize[2]
1813 fullsize[2] /= numfull
1813 fullsize[2] /= numfull
1814 deltatotal = deltasize[2]
1814 deltatotal = deltasize[2]
1815 if numrevs - numfull > 0:
1815 if numrevs - numfull > 0:
1816 deltasize[2] /= numrevs - numfull
1816 deltasize[2] /= numrevs - numfull
1817 totalsize = fulltotal + deltatotal
1817 totalsize = fulltotal + deltatotal
1818 avgchainlen = sum(chainlengths) / numrevs
1818 avgchainlen = sum(chainlengths) / numrevs
1819 maxchainlen = max(chainlengths)
1819 maxchainlen = max(chainlengths)
1820 compratio = 1
1820 compratio = 1
1821 if totalsize:
1821 if totalsize:
1822 compratio = totalrawsize / totalsize
1822 compratio = totalrawsize / totalsize
1823
1823
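# Illustration (not part of the original source): compratio compares the
# bytes the revisions would occupy fully expanded against the bytes the
# revlog actually stores.  For example, 300 bytes of raw text kept as one
# 100-byte full snapshot plus two 25-byte deltas gives totalrawsize = 300,
# totalsize = 150 and therefore a compression ratio of 2.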
1824 basedfmtstr = '%%%dd\n'
1824 basedfmtstr = '%%%dd\n'
1825 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
1825 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
1826
1826
1827 def dfmtstr(max):
1827 def dfmtstr(max):
1828 return basedfmtstr % len(str(max))
1828 return basedfmtstr % len(str(max))
1829 def pcfmtstr(max, padding=0):
1829 def pcfmtstr(max, padding=0):
1830 return basepcfmtstr % (len(str(max)), ' ' * padding)
1830 return basepcfmtstr % (len(str(max)), ' ' * padding)
1831
1831
1832 def pcfmt(value, total):
1832 def pcfmt(value, total):
1833 if total:
1833 if total:
1834 return (value, 100 * float(value) / total)
1834 return (value, 100 * float(value) / total)
1835 else:
1835 else:
1836 return value, 100.0
1836 return value, 100.0
1837
1837
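# Illustration (not part of the original source): dfmtstr/pcfmtstr build
# format strings whose field width matches the widest expected value, and
# pcfmt pairs a value with its percentage of a total.  For instance
#   pcfmtstr(200) % pcfmt(50, 200)
# yields ' 50 (25.00%)\n', which keeps the columns written below aligned.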
1838 ui.write(('format : %d\n') % format)
1838 ui.write(('format : %d\n') % format)
1839 ui.write(('flags : %s\n') % ', '.join(flags))
1839 ui.write(('flags : %s\n') % ', '.join(flags))
1840
1840
1841 ui.write('\n')
1841 ui.write('\n')
1842 fmt = pcfmtstr(totalsize)
1842 fmt = pcfmtstr(totalsize)
1843 fmt2 = dfmtstr(totalsize)
1843 fmt2 = dfmtstr(totalsize)
1844 ui.write(('revisions : ') + fmt2 % numrevs)
1844 ui.write(('revisions : ') + fmt2 % numrevs)
1845 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
1845 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
1846 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
1846 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
1847 ui.write(('revisions : ') + fmt2 % numrevs)
1847 ui.write(('revisions : ') + fmt2 % numrevs)
1848 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
1848 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
1849 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
1849 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
1850 ui.write(('revision size : ') + fmt2 % totalsize)
1850 ui.write(('revision size : ') + fmt2 % totalsize)
1851 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
1851 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
1852 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
1852 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
1853
1853
1854 def fmtchunktype(chunktype):
1854 def fmtchunktype(chunktype):
1855 if chunktype == 'empty':
1855 if chunktype == 'empty':
1856 return ' %s : ' % chunktype
1856 return ' %s : ' % chunktype
1857 elif chunktype in string.ascii_letters:
1857 elif chunktype in string.ascii_letters:
1858 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
1858 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
1859 else:
1859 else:
1860 return ' 0x%s : ' % hex(chunktype)
1860 return ' 0x%s : ' % hex(chunktype)
1861
1861
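# Illustration (not part of the original source): the chunk type recorded
# earlier is simply the first byte of each stored chunk, so typical revlogs
# report 'x' for zlib-compressed chunks (zlib streams begin with 0x78,
# i.e. 'x'), 'u' for chunks stored uncompressed, and 'empty' for zero-length
# ones; fmtchunktype prints any non-letter byte in hex only.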
1862 ui.write('\n')
1862 ui.write('\n')
1863 ui.write(('chunks : ') + fmt2 % numrevs)
1863 ui.write(('chunks : ') + fmt2 % numrevs)
1864 for chunktype in sorted(chunktypecounts):
1864 for chunktype in sorted(chunktypecounts):
1865 ui.write(fmtchunktype(chunktype))
1865 ui.write(fmtchunktype(chunktype))
1866 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
1866 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
1867 ui.write(('chunks size : ') + fmt2 % totalsize)
1867 ui.write(('chunks size : ') + fmt2 % totalsize)
1868 for chunktype in sorted(chunktypecounts):
1868 for chunktype in sorted(chunktypecounts):
1869 ui.write(fmtchunktype(chunktype))
1869 ui.write(fmtchunktype(chunktype))
1870 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
1870 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
1871
1871
1872 ui.write('\n')
1872 ui.write('\n')
1873 fmt = dfmtstr(max(avgchainlen, compratio))
1873 fmt = dfmtstr(max(avgchainlen, compratio))
1874 ui.write(('avg chain length : ') + fmt % avgchainlen)
1874 ui.write(('avg chain length : ') + fmt % avgchainlen)
1875 ui.write(('max chain length : ') + fmt % maxchainlen)
1875 ui.write(('max chain length : ') + fmt % maxchainlen)
1876 ui.write(('compression ratio : ') + fmt % compratio)
1876 ui.write(('compression ratio : ') + fmt % compratio)
1877
1877
1878 if format > 0:
1878 if format > 0:
1879 ui.write('\n')
1879 ui.write('\n')
1880 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
1880 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
1881 % tuple(datasize))
1881 % tuple(datasize))
1882 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
1882 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
1883 % tuple(fullsize))
1883 % tuple(fullsize))
1884 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
1884 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
1885 % tuple(deltasize))
1885 % tuple(deltasize))
1886
1886
1887 if numdeltas > 0:
1887 if numdeltas > 0:
1888 ui.write('\n')
1888 ui.write('\n')
1889 fmt = pcfmtstr(numdeltas)
1889 fmt = pcfmtstr(numdeltas)
1890 fmt2 = pcfmtstr(numdeltas, 4)
1890 fmt2 = pcfmtstr(numdeltas, 4)
1891 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
1891 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
1892 if numprev > 0:
1892 if numprev > 0:
1893 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
1893 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
1894 numprev))
1894 numprev))
1895 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
1895 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
1896 numprev))
1896 numprev))
1897 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
1897 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
1898 numprev))
1898 numprev))
1899 if gdelta:
1899 if gdelta:
1900 ui.write(('deltas against p1 : ')
1900 ui.write(('deltas against p1 : ')
1901 + fmt % pcfmt(nump1, numdeltas))
1901 + fmt % pcfmt(nump1, numdeltas))
1902 ui.write(('deltas against p2 : ')
1902 ui.write(('deltas against p2 : ')
1903 + fmt % pcfmt(nump2, numdeltas))
1903 + fmt % pcfmt(nump2, numdeltas))
1904 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
1904 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
1905 numdeltas))
1905 numdeltas))
1906
1906
1907 @command('debugrevspec',
1907 @command('debugrevspec',
1908 [('', 'optimize', None,
1908 [('', 'optimize', None,
1909 _('print parsed tree after optimizing (DEPRECATED)')),
1909 _('print parsed tree after optimizing (DEPRECATED)')),
1910 ('p', 'show-stage', [],
1910 ('p', 'show-stage', [],
1911 _('print parsed tree at the given stage'), _('NAME')),
1911 _('print parsed tree at the given stage'), _('NAME')),
1912 ('', 'no-optimized', False, _('evaluate tree without optimization')),
1912 ('', 'no-optimized', False, _('evaluate tree without optimization')),
1913 ('', 'verify-optimized', False, _('verify optimized result')),
1913 ('', 'verify-optimized', False, _('verify optimized result')),
1914 ],
1914 ],
1915 ('REVSPEC'))
1915 ('REVSPEC'))
1916 def debugrevspec(ui, repo, expr, **opts):
1916 def debugrevspec(ui, repo, expr, **opts):
1917 """parse and apply a revision specification
1917 """parse and apply a revision specification
1918
1918
1919 Use the -p/--show-stage option to print the parsed tree at the given stages.
1919 Use the -p/--show-stage option to print the parsed tree at the given stages.
1920 Use -p all to print the tree at every stage.
1920 Use -p all to print the tree at every stage.
1921
1921
1922 Use --verify-optimized to compare the optimized result with the unoptimized
1922 Use --verify-optimized to compare the optimized result with the unoptimized
1923 one. Returns 1 if the optimized result differs.
1923 one. Returns 1 if the optimized result differs.
1924 """
1924 """
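# Illustration (not part of the original source; the revset is hypothetical):
#   hg debugrevspec -p all 'ancestors(.) and not merge()'
# prints the tree after each stage defined below and then the matching
# revisions, while --verify-optimized evaluates both the analyzed and the
# optimized trees and returns 1 if their results differ.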
1925 stages = [
1925 stages = [
1926 ('parsed', lambda tree: tree),
1926 ('parsed', lambda tree: tree),
1927 ('expanded', lambda tree: revsetlang.expandaliases(ui, tree)),
1927 ('expanded', lambda tree: revsetlang.expandaliases(ui, tree)),
1928 ('concatenated', revsetlang.foldconcat),
1928 ('concatenated', revsetlang.foldconcat),
1929 ('analyzed', revsetlang.analyze),
1929 ('analyzed', revsetlang.analyze),
1930 ('optimized', revsetlang.optimize),
1930 ('optimized', revsetlang.optimize),
1931 ]
1931 ]
1932 if opts['no_optimized']:
1932 if opts['no_optimized']:
1933 stages = stages[:-1]
1933 stages = stages[:-1]
1934 if opts['verify_optimized'] and opts['no_optimized']:
1934 if opts['verify_optimized'] and opts['no_optimized']:
1935 raise error.Abort(_('cannot use --verify-optimized with '
1935 raise error.Abort(_('cannot use --verify-optimized with '
1936 '--no-optimized'))
1936 '--no-optimized'))
1937 stagenames = set(n for n, f in stages)
1937 stagenames = set(n for n, f in stages)
1938
1938
1939 showalways = set()
1939 showalways = set()
1940 showchanged = set()
1940 showchanged = set()
1941 if ui.verbose and not opts['show_stage']:
1941 if ui.verbose and not opts['show_stage']:
1942 # show parsed tree by --verbose (deprecated)
1942 # show parsed tree by --verbose (deprecated)
1943 showalways.add('parsed')
1943 showalways.add('parsed')
1944 showchanged.update(['expanded', 'concatenated'])
1944 showchanged.update(['expanded', 'concatenated'])
1945 if opts['optimize']:
1945 if opts['optimize']:
1946 showalways.add('optimized')
1946 showalways.add('optimized')
1947 if opts['show_stage'] and opts['optimize']:
1947 if opts['show_stage'] and opts['optimize']:
1948 raise error.Abort(_('cannot use --optimize with --show-stage'))
1948 raise error.Abort(_('cannot use --optimize with --show-stage'))
1949 if opts['show_stage'] == ['all']:
1949 if opts['show_stage'] == ['all']:
1950 showalways.update(stagenames)
1950 showalways.update(stagenames)
1951 else:
1951 else:
1952 for n in opts['show_stage']:
1952 for n in opts['show_stage']:
1953 if n not in stagenames:
1953 if n not in stagenames:
1954 raise error.Abort(_('invalid stage name: %s') % n)
1954 raise error.Abort(_('invalid stage name: %s') % n)
1955 showalways.update(opts['show_stage'])
1955 showalways.update(opts['show_stage'])
1956
1956
1957 treebystage = {}
1957 treebystage = {}
1958 printedtree = None
1958 printedtree = None
1959 tree = revsetlang.parse(expr, lookup=repo.__contains__)
1959 tree = revsetlang.parse(expr, lookup=repo.__contains__)
1960 for n, f in stages:
1960 for n, f in stages:
1961 treebystage[n] = tree = f(tree)
1961 treebystage[n] = tree = f(tree)
1962 if n in showalways or (n in showchanged and tree != printedtree):
1962 if n in showalways or (n in showchanged and tree != printedtree):
1963 if opts['show_stage'] or n != 'parsed':
1963 if opts['show_stage'] or n != 'parsed':
1964 ui.write(("* %s:\n") % n)
1964 ui.write(("* %s:\n") % n)
1965 ui.write(revsetlang.prettyformat(tree), "\n")
1965 ui.write(revsetlang.prettyformat(tree), "\n")
1966 printedtree = tree
1966 printedtree = tree
1967
1967
1968 if opts['verify_optimized']:
1968 if opts['verify_optimized']:
1969 arevs = revset.makematcher(treebystage['analyzed'])(repo)
1969 arevs = revset.makematcher(treebystage['analyzed'])(repo)
1970 brevs = revset.makematcher(treebystage['optimized'])(repo)
1970 brevs = revset.makematcher(treebystage['optimized'])(repo)
1971 if ui.verbose:
1971 if ui.verbose:
1972 ui.note(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
1972 ui.note(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
1973 ui.note(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
1973 ui.note(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
1974 arevs = list(arevs)
1974 arevs = list(arevs)
1975 brevs = list(brevs)
1975 brevs = list(brevs)
1976 if arevs == brevs:
1976 if arevs == brevs:
1977 return 0
1977 return 0
1978 ui.write(('--- analyzed\n'), label='diff.file_a')
1978 ui.write(('--- analyzed\n'), label='diff.file_a')
1979 ui.write(('+++ optimized\n'), label='diff.file_b')
1979 ui.write(('+++ optimized\n'), label='diff.file_b')
1980 sm = difflib.SequenceMatcher(None, arevs, brevs)
1980 sm = difflib.SequenceMatcher(None, arevs, brevs)
1981 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1981 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1982 if tag in ('delete', 'replace'):
1982 if tag in ('delete', 'replace'):
1983 for c in arevs[alo:ahi]:
1983 for c in arevs[alo:ahi]:
1984 ui.write('-%s\n' % c, label='diff.deleted')
1984 ui.write('-%s\n' % c, label='diff.deleted')
1985 if tag in ('insert', 'replace'):
1985 if tag in ('insert', 'replace'):
1986 for c in brevs[blo:bhi]:
1986 for c in brevs[blo:bhi]:
1987 ui.write('+%s\n' % c, label='diff.inserted')
1987 ui.write('+%s\n' % c, label='diff.inserted')
1988 if tag == 'equal':
1988 if tag == 'equal':
1989 for c in arevs[alo:ahi]:
1989 for c in arevs[alo:ahi]:
1990 ui.write(' %s\n' % c)
1990 ui.write(' %s\n' % c)
1991 return 1
1991 return 1
1992
1992
1993 func = revset.makematcher(tree)
1993 func = revset.makematcher(tree)
1994 revs = func(repo)
1994 revs = func(repo)
1995 if ui.verbose:
1995 if ui.verbose:
1996 ui.note(("* set:\n"), smartset.prettyformat(revs), "\n")
1996 ui.note(("* set:\n"), smartset.prettyformat(revs), "\n")
1997 for c in revs:
1997 for c in revs:
1998 ui.write("%s\n" % c)
1998 ui.write("%s\n" % c)
1999
1999
2000 @command('debugsetparents', [], _('REV1 [REV2]'))
2000 @command('debugsetparents', [], _('REV1 [REV2]'))
2001 def debugsetparents(ui, repo, rev1, rev2=None):
2001 def debugsetparents(ui, repo, rev1, rev2=None):
2002 """manually set the parents of the current working directory
2002 """manually set the parents of the current working directory
2003
2003
2004 This is useful for writing repository conversion tools, but should
2004 This is useful for writing repository conversion tools, but should
2005 be used with care. For example, neither the working directory nor the
2005 be used with care. For example, neither the working directory nor the
2006 dirstate is updated, so file status may be incorrect after running this
2006 dirstate is updated, so file status may be incorrect after running this
2007 command.
2007 command.
2008
2008
2009 Returns 0 on success.
2009 Returns 0 on success.
2010 """
2010 """
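# Illustration (not part of the original source; revision numbers are
# hypothetical):
#   hg debugsetparents 0 3    # make revisions 0 and 3 the working parents
#   hg debugsetparents null   # detach the working directory from history
# keeping in mind the warning above that the dirstate itself is left alone.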
2011
2011
2012 r1 = scmutil.revsingle(repo, rev1).node()
2012 r1 = scmutil.revsingle(repo, rev1).node()
2013 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2013 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2014
2014
2015 with repo.wlock():
2015 with repo.wlock():
2016 repo.setparents(r1, r2)
2016 repo.setparents(r1, r2)
2017
2017
2018 @command('debugsub',
2018 @command('debugsub',
2019 [('r', 'rev', '',
2019 [('r', 'rev', '',
2020 _('revision to check'), _('REV'))],
2020 _('revision to check'), _('REV'))],
2021 _('[-r REV] [REV]'))
2021 _('[-r REV] [REV]'))
2022 def debugsub(ui, repo, rev=None):
2022 def debugsub(ui, repo, rev=None):
2023 ctx = scmutil.revsingle(repo, rev, None)
2023 ctx = scmutil.revsingle(repo, rev, None)
2024 for k, v in sorted(ctx.substate.items()):
2024 for k, v in sorted(ctx.substate.items()):
2025 ui.write(('path %s\n') % k)
2025 ui.write(('path %s\n') % k)
2026 ui.write((' source %s\n') % v[0])
2026 ui.write((' source %s\n') % v[0])
2027 ui.write((' revision %s\n') % v[1])
2027 ui.write((' revision %s\n') % v[1])
2028
2028
2029 @command('debugsuccessorssets',
2029 @command('debugsuccessorssets',
2030 [],
2030 [],
2031 _('[REV]'))
2031 _('[REV]'))
2032 def debugsuccessorssets(ui, repo, *revs):
2032 def debugsuccessorssets(ui, repo, *revs):
2033 """show set of successors for revision
2033 """show set of successors for revision
2034
2034
2035 A successors set of changeset A is a consistent group of revisions that
2035 A successors set of changeset A is a consistent group of revisions that
2036 succeed A. It contains non-obsolete changesets only.
2036 succeed A. It contains non-obsolete changesets only.
2037
2037
2038 In most cases a changeset A has a single successors set containing a single
2038 In most cases a changeset A has a single successors set containing a single
2039 successor (changeset A replaced by A').
2039 successor (changeset A replaced by A').
2040
2040
2041 A changeset that is made obsolete with no successors is called "pruned".
2041 A changeset that is made obsolete with no successors is called "pruned".
2042 Such changesets have no successors sets at all.
2042 Such changesets have no successors sets at all.
2043
2043
2044 A changeset that has been "split" will have a successors set containing
2044 A changeset that has been "split" will have a successors set containing
2045 more than one successor.
2045 more than one successor.
2046
2046
2047 A changeset that has been rewritten in multiple different ways is called
2047 A changeset that has been rewritten in multiple different ways is called
2048 "divergent". Such changesets have multiple successor sets (each of which
2048 "divergent". Such changesets have multiple successor sets (each of which
2049 may also be split, i.e. have multiple successors).
2049 may also be split, i.e. have multiple successors).
2050
2050
2051 Results are displayed as follows::
2051 Results are displayed as follows::
2052
2052
2053 <rev1>
2053 <rev1>
2054 <successors-1A>
2054 <successors-1A>
2055 <rev2>
2055 <rev2>
2056 <successors-2A>
2056 <successors-2A>
2057 <successors-2B1> <successors-2B2> <successors-2B3>
2057 <successors-2B1> <successors-2B2> <successors-2B3>
2058
2058
2059 Here rev2 has two possible (i.e. divergent) successors sets. The first
2059 Here rev2 has two possible (i.e. divergent) successors sets. The first
2060 holds one element, whereas the second holds three (i.e. the changeset has
2060 holds one element, whereas the second holds three (i.e. the changeset has
2061 been split).
2061 been split).
2062 """
2062 """
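# Illustration (not part of the original source; hashes are made up): for a
# changeset that was amended once and another that was split in two, the
# output written below would resemble
#   1ab2c3d4e5f6
#       6f5e4d3c2b1a
#   9988776655aa
#       aabbccddeeff 112233445566
# one indented line per successors set, with the members of a split listed
# on the same line; --debug switches the short hashes to full 40-character
# ones.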
2063 # passed to successorssets caching computation from one call to another
2063 # passed to successorssets caching computation from one call to another
2064 cache = {}
2064 cache = {}
2065 ctx2str = str
2065 ctx2str = str
2066 node2str = short
2066 node2str = short
2067 if ui.debug():
2067 if ui.debug():
2068 def ctx2str(ctx):
2068 def ctx2str(ctx):
2069 return ctx.hex()
2069 return ctx.hex()
2070 node2str = hex
2070 node2str = hex
2071 for rev in scmutil.revrange(repo, revs):
2071 for rev in scmutil.revrange(repo, revs):
2072 ctx = repo[rev]
2072 ctx = repo[rev]
2073 ui.write('%s\n'% ctx2str(ctx))
2073 ui.write('%s\n'% ctx2str(ctx))
2074 for succsset in obsolete.successorssets(repo, ctx.node(), cache):
2074 for succsset in obsolete.successorssets(repo, ctx.node(), cache):
2075 if succsset:
2075 if succsset:
2076 ui.write(' ')
2076 ui.write(' ')
2077 ui.write(node2str(succsset[0]))
2077 ui.write(node2str(succsset[0]))
2078 for node in succsset[1:]:
2078 for node in succsset[1:]:
2079 ui.write(' ')
2079 ui.write(' ')
2080 ui.write(node2str(node))
2080 ui.write(node2str(node))
2081 ui.write('\n')
2081 ui.write('\n')
2082
2082
2083 @command('debugtemplate',
2083 @command('debugtemplate',
2084 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2084 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2085 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2085 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2086 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2086 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2087 optionalrepo=True)
2087 optionalrepo=True)
2088 def debugtemplate(ui, repo, tmpl, **opts):
2088 def debugtemplate(ui, repo, tmpl, **opts):
2089 """parse and apply a template
2089 """parse and apply a template
2090
2090
2091 If -r/--rev is given, the template is processed as a log template and
2091 If -r/--rev is given, the template is processed as a log template and
2092 applied to the given changesets. Otherwise, it is processed as a generic
2092 applied to the given changesets. Otherwise, it is processed as a generic
2093 template.
2093 template.
2094
2094
2095 Use --verbose to print the parsed tree.
2095 Use --verbose to print the parsed tree.
2096 """
2096 """
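# Illustration (not part of the original source; template text is only an
# example):
#   hg debugtemplate -D foo=bar '{foo}\n'          # generic template, no repo required
#   hg debugtemplate -r . '{rev}:{node|short}\n'   # log template applied to '.'
# with --verbose additionally printing the parsed tree and, when template
# aliases apply, the expanded tree.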
2097 revs = None
2097 revs = None
2098 if opts['rev']:
2098 if opts['rev']:
2099 if repo is None:
2099 if repo is None:
2100 raise error.RepoError(_('there is no Mercurial repository here '
2100 raise error.RepoError(_('there is no Mercurial repository here '
2101 '(.hg not found)'))
2101 '(.hg not found)'))
2102 revs = scmutil.revrange(repo, opts['rev'])
2102 revs = scmutil.revrange(repo, opts['rev'])
2103
2103
2104 props = {}
2104 props = {}
2105 for d in opts['define']:
2105 for d in opts['define']:
2106 try:
2106 try:
2107 k, v = (e.strip() for e in d.split('=', 1))
2107 k, v = (e.strip() for e in d.split('=', 1))
2108 if not k or k == 'ui':
2108 if not k or k == 'ui':
2109 raise ValueError
2109 raise ValueError
2110 props[k] = v
2110 props[k] = v
2111 except ValueError:
2111 except ValueError:
2112 raise error.Abort(_('malformed keyword definition: %s') % d)
2112 raise error.Abort(_('malformed keyword definition: %s') % d)
2113
2113
2114 if ui.verbose:
2114 if ui.verbose:
2115 aliases = ui.configitems('templatealias')
2115 aliases = ui.configitems('templatealias')
2116 tree = templater.parse(tmpl)
2116 tree = templater.parse(tmpl)
2117 ui.note(templater.prettyformat(tree), '\n')
2117 ui.note(templater.prettyformat(tree), '\n')
2118 newtree = templater.expandaliases(tree, aliases)
2118 newtree = templater.expandaliases(tree, aliases)
2119 if newtree != tree:
2119 if newtree != tree:
2120 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2120 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2121
2121
2122 mapfile = None
2122 mapfile = None
2123 if revs is None:
2123 if revs is None:
2124 k = 'debugtemplate'
2124 k = 'debugtemplate'
2125 t = formatter.maketemplater(ui, k, tmpl)
2125 t = formatter.maketemplater(ui, k, tmpl)
2126 ui.write(templater.stringify(t(k, ui=ui, **props)))
2126 ui.write(templater.stringify(t(k, ui=ui, **props)))
2127 else:
2127 else:
2128 displayer = cmdutil.changeset_templater(ui, repo, None, opts, tmpl,
2128 displayer = cmdutil.changeset_templater(ui, repo, None, opts, tmpl,
2129 mapfile, buffered=False)
2129 mapfile, buffered=False)
2130 for r in revs:
2130 for r in revs:
2131 displayer.show(repo[r], **props)
2131 displayer.show(repo[r], **props)
2132 displayer.close()
2132 displayer.close()
2133
2133
2134 @command('debugupdatecaches', [])
2134 @command('debugupdatecaches', [])
2135 def debugupdatecaches(ui, repo, *pats, **opts):
2135 def debugupdatecaches(ui, repo, *pats, **opts):
2136 """warm all known caches in the repository"""
2136 """warm all known caches in the repository"""
2137 with repo.wlock():
2137 with repo.wlock():
2138 with repo.lock():
2138 with repo.lock():
2139 repo.updatecaches()
2139 repo.updatecaches()
2140
2140
2141 @command('debugupgraderepo', [
2141 @command('debugupgraderepo', [
2142 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2142 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2143 ('', 'run', False, _('performs an upgrade')),
2143 ('', 'run', False, _('performs an upgrade')),
2144 ])
2144 ])
2145 def debugupgraderepo(ui, repo, run=False, optimize=None):
2145 def debugupgraderepo(ui, repo, run=False, optimize=None):
2146 """upgrade a repository to use different features
2146 """upgrade a repository to use different features
2147
2147
2148 If no arguments are specified, the repository is evaluated for upgrade
2148 If no arguments are specified, the repository is evaluated for upgrade
2149 and a list of problems and potential optimizations is printed.
2149 and a list of problems and potential optimizations is printed.
2150
2150
2151 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2151 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2152 can be influenced via additional arguments. More details will be provided
2152 can be influenced via additional arguments. More details will be provided
2153 by the command output when run without ``--run``.
2153 by the command output when run without ``--run``.
2154
2154
2155 During the upgrade, the repository will be locked and no writes will be
2155 During the upgrade, the repository will be locked and no writes will be
2156 allowed.
2156 allowed.
2157
2157
2158 At the end of the upgrade, the repository may not be readable while new
2158 At the end of the upgrade, the repository may not be readable while new
2159 repository data is swapped in. This window will be as long as it takes to
2159 repository data is swapped in. This window will be as long as it takes to
2160 rename some directories inside the ``.hg`` directory. On most machines, this
2160 rename some directories inside the ``.hg`` directory. On most machines, this
2161 should complete almost instantaneously and the chances of a consumer being
2161 should complete almost instantaneously and the chances of a consumer being
2162 unable to access the repository should be low.
2162 unable to access the repository should be low.
2163 """
2163 """
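# Illustration (not part of the original source): running
#   hg debugupgraderepo
# with no arguments only prints the report described above; adding --run
# performs the upgrade, and each repeatable -o/--optimize NAME opts into one
# of the optional optimizations named in that report (for example, an
# optimization listed as "redeltaall" would be requested with -o redeltaall).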
2164 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2164 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2165
2165
2166 @command('debugwalk', commands.walkopts, _('[OPTION]... [FILE]...'),
2166 @command('debugwalk', commands.walkopts, _('[OPTION]... [FILE]...'),
2167 inferrepo=True)
2167 inferrepo=True)
2168 def debugwalk(ui, repo, *pats, **opts):
2168 def debugwalk(ui, repo, *pats, **opts):
2169 """show how files match on given patterns"""
2169 """show how files match on given patterns"""
2170 m = scmutil.match(repo[None], pats, opts)
2170 m = scmutil.match(repo[None], pats, opts)
2171 items = list(repo.walk(m))
2171 items = list(repo[None].walk(m))
2172 if not items:
2172 if not items:
2173 return
2173 return
2174 f = lambda fn: fn
2174 f = lambda fn: fn
2175 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2175 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2176 f = lambda fn: util.normpath(fn)
2176 f = lambda fn: util.normpath(fn)
2177 fmt = 'f %%-%ds %%-%ds %%s' % (
2177 fmt = 'f %%-%ds %%-%ds %%s' % (
2178 max([len(abs) for abs in items]),
2178 max([len(abs) for abs in items]),
2179 max([len(m.rel(abs)) for abs in items]))
2179 max([len(m.rel(abs)) for abs in items]))
2180 for abs in items:
2180 for abs in items:
2181 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2181 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2182 ui.write("%s\n" % line.rstrip())
2182 ui.write("%s\n" % line.rstrip())
2183
2183
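# Illustration (not part of the original source) of the change above: in
# this era repo.walk(m) appears to be a thin wrapper that delegated to a
# context's walk(), so asking the working-directory context directly via
# repo[None].walk(m) is equivalent here; it yields the matched file names
# relative to the repository root, which is what the formatting loop
# consumes.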
2184 @command('debugwireargs',
2184 @command('debugwireargs',
2185 [('', 'three', '', 'three'),
2185 [('', 'three', '', 'three'),
2186 ('', 'four', '', 'four'),
2186 ('', 'four', '', 'four'),
2187 ('', 'five', '', 'five'),
2187 ('', 'five', '', 'five'),
2188 ] + commands.remoteopts,
2188 ] + commands.remoteopts,
2189 _('REPO [OPTIONS]... [ONE [TWO]]'),
2189 _('REPO [OPTIONS]... [ONE [TWO]]'),
2190 norepo=True)
2190 norepo=True)
2191 def debugwireargs(ui, repopath, *vals, **opts):
2191 def debugwireargs(ui, repopath, *vals, **opts):
2192 repo = hg.peer(ui, opts, repopath)
2192 repo = hg.peer(ui, opts, repopath)
2193 for opt in commands.remoteopts:
2193 for opt in commands.remoteopts:
2194 del opts[opt[1]]
2194 del opts[opt[1]]
2195 args = {}
2195 args = {}
2196 for k, v in opts.iteritems():
2196 for k, v in opts.iteritems():
2197 if v:
2197 if v:
2198 args[k] = v
2198 args[k] = v
2199 # run twice to check that we don't mess up the stream for the next command
2199 # run twice to check that we don't mess up the stream for the next command
2200 res1 = repo.debugwireargs(*vals, **args)
2200 res1 = repo.debugwireargs(*vals, **args)
2201 res2 = repo.debugwireargs(*vals, **args)
2201 res2 = repo.debugwireargs(*vals, **args)
2202 ui.write("%s\n" % res1)
2202 ui.write("%s\n" % res1)
2203 if res1 != res2:
2203 if res1 != res2:
2204 ui.warn("%s\n" % res2)
2204 ui.warn("%s\n" % res2)