debugcommands: pass part, not read data, into _debugobsmarker()...
Martin von Zweigbergk
r33029:b482d80e default
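In effect, this changeset moves the part.read() call from the _debugbundle2 call site into _debugobsmarkers itself, so the helper receives the bundle2 part object and reads the payload right where the markers are parsed. Below is a minimal, runnable sketch of the calling convention before and after; FakePart and its payload are hypothetical stand-ins for illustration, not Mercurial APIs.

# Sketch only: FakePart mimics the one method of a bundle2 part used here.
class FakePart(object):
    def __init__(self, payload):
        self._payload = payload

    def read(self):
        return self._payload

# Before this changeset the caller consumed the part up front:
#     _debugobsmarkers(ui, part.read(), indent=4, **opts)
# After it, the part itself is passed and read inside the helper:
def _debugobsmarkers_sketch(part, indent=0):
    data = part.read()  # the read now happens next to the parsing code
    return '%s%d bytes of obsmarker data' % (' ' * indent, len(data))

print(_debugobsmarkers_sketch(FakePart(b'raw-obsmarker-bytes'), indent=4))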
mercurial/debugcommands.py
@@ -1,2204 +1,2205 @@
# debugcommands.py - command processing for debug* commands
#
# Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import difflib
import errno
import operator
import os
import random
import socket
import string
import sys
import tempfile
import time

from .i18n import _
from .node import (
    bin,
    hex,
    nullhex,
    nullid,
    nullrev,
    short,
)
from . import (
    bundle2,
    changegroup,
    cmdutil,
    color,
    context,
    dagparser,
    dagutil,
    encoding,
    error,
    exchange,
    extensions,
    filemerge,
    fileset,
    formatter,
    hg,
    localrepo,
    lock as lockmod,
    merge as mergemod,
    obsolete,
    phases,
    policy,
    pvec,
    pycompat,
    registrar,
    repair,
    revlog,
    revset,
    revsetlang,
    scmutil,
    setdiscovery,
    simplemerge,
    smartset,
    sslutil,
    streamclone,
    templater,
    treediscovery,
    upgrade,
    util,
    vfs as vfsmod,
)

release = lockmod.release

command = registrar.command()

@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    if len(args) == 3:
        index, rev1, rev2 = args
        r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
        lookup = r.lookup
    elif len(args) == 2:
        if not repo:
            raise error.Abort(_('there is no Mercurial repository here '
                                '(.hg not found)'))
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_('either two or three arguments required'))
    a = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write('%d:%s\n' % (r.rev(a), hex(a)))

@command('debugapplystreamclonebundle', [], 'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    f = hg.openpath(ui, fname)
    gen = exchange.readbundle(ui, f, fname)
    gen.apply(repo)

@command('debugbuilddag',
    [('m', 'mergeable-file', None, _('add single file mergeable changes')),
    ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
    ('n', 'new-file', None, _('add new file at each rev'))],
    _('[OPTION]... [TEXT]'))
def debugbuilddag(ui, repo, text=None,
                  mergeable_file=False,
                  overwritten_file=False,
                  new_file=False):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
          otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_("reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_('repository is not empty'))

    # determine number of revs in DAG
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == 'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
        initialmergedlines.append("")

    tags = []

    wlock = lock = tr = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        tr = repo.transaction("builddag")

        at = -1
        atbranch = 'default'
        nodeids = []
        id = 0
        ui.progress(_('building'), id, unit=_('revisions'), total=total)
        for type, data in dagparser.parsedag(text):
            if type == 'n':
                ui.note(('node %s\n' % str(data)))
                id, ps = data

                files = []
                fctxs = {}

                p2 = None
                if mergeable_file:
                    fn = "mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [x[fn].data() for x in (pa, p1,
                                                                     p2)]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append("")
                    elif at > 0:
                        ml = p1[fn].data().split("\n")
                    else:
                        ml = initialmergedlines
                    ml[id * linesperrev] += " r%i" % id
                    mergedtext = "\n".join(ml)
                    files.append(fn)
                    fctxs[fn] = context.memfilectx(repo, fn, mergedtext)

                if overwritten_file:
                    fn = "of"
                    files.append(fn)
                    fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)

                if new_file:
                    fn = "nf%i" % id
                    files.append(fn)
                    fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith("nf"):
                                files.append(fn)
                                fctxs[fn] = p2[fn]

                def fctxfn(repo, cx, path):
                    return fctxs.get(path)

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
                                    date=(id, 0),
                                    user="debugbuilddag",
                                    extra={'branch': atbranch})
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == 'l':
                id, name = data
                ui.note(('tag %s\n' % name))
                tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == 'a':
                ui.note(('branch %s\n' % data))
                atbranch = data
            ui.progress(_('building'), id, unit=_('revisions'), total=total)
        tr.close()

        if tags:
            repo.vfs.write("localtags", "".join(tags))
    finally:
        ui.progress(_('building'), None)
        release(tr, lock, wlock)

def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    indent_string = ' ' * indent
    if all:
        ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
                 % indent_string)

        def showchunks(named):
            ui.write("\n%s%s\n" % (indent_string, named))
            chain = None
            for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
                node = chunkdata['node']
                p1 = chunkdata['p1']
                p2 = chunkdata['p2']
                cs = chunkdata['cs']
                deltabase = chunkdata['deltabase']
                delta = chunkdata['delta']
                ui.write("%s%s %s %s %s %s %s\n" %
                         (indent_string, hex(node), hex(p1), hex(p2),
                          hex(cs), hex(deltabase), len(delta)))
                chain = node

        chunkdata = gen.changelogheader()
        showchunks("changelog")
        chunkdata = gen.manifestheader()
        showchunks("manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata['filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_('use debugbundle2 for this file'))
        chunkdata = gen.changelogheader()
        chain = None
        for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
            node = chunkdata['node']
            ui.write("%s%s\n" % (indent_string, hex(node)))
            chain = node

-def _debugobsmarkers(ui, data, indent=0, **opts):
+def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
+    data = part.read()
    indent_string = ' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        msg = "%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = "%sversion: %s (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        fm = ui.formatter('debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsolete.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()

def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_('not a bundle2 file'))
    ui.write(('Stream params: %s\n' % repr(gen.params)))
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        ui.write('%s -- %r\n' % (part.type, repr(part.params)))
        if part.type == 'changegroup':
            version = part.params.get('version', '01')
            cg = changegroup.getunbundler(version, part, 'UN')
            _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == 'obsmarkers':
-            _debugobsmarkers(ui, part.read(), indent=4, **opts)
+            _debugobsmarkers(ui, part, indent=4, **opts)

@command('debugbundle',
    [('a', 'all', None, _('show all details')),
    ('', 'part-type', [], _('show only the named part type')),
    ('', 'spec', None, _('print the bundlespec of the bundle'))],
    _('FILE'),
    norepo=True)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            spec = exchange.getbundlespec(ui, f)
            ui.write('%s\n' % spec)
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)

@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        error = _(".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(error)

@command('debugcolor',
    [('', 'style', None, _('show all configured styles'))],
    'hg debugcolor')
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.write(('color mode: %s\n') % ui._colormode)
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    else:
        return _debugdisplaycolor(ui)

def _debugdisplaycolor(ui):
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for k, v in ui.configitems('color'):
            if k.startswith('color.'):
                ui._styles[k] = k[6:]
            elif k.startswith('terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_('available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(),
                   key=lambda i: ('_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(('%s\n') % colorname, label=label)

def _debugdisplaystyle(ui):
    ui.write(_('available style:\n'))
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write('%s' % label, label=label)
        if effects:
            # 50
            ui.write(': ')
            ui.write(' ' * (max(0, width - len(label))))
            ui.write(', '.join(ui.label(e, e) for e in effects.split()))
        ui.write('\n')

@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(_('(warning: stream clone bundle will contain secret '
                  'revisions)\n'))

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))

@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
    ('b', 'branches', None, _('annotate with branch names')),
    ('', 'dots', None, _('use dots for runs')),
    ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                             file_)
        revs = set((int(r) for r in revs))
        def events():
            for r in rlog:
                yield 'n', (r, list(p for p in rlog.parentrevs(r)
                                    if p != -1))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            b = "default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(p for p in cl.parentrevs(r)
                                    if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
        ui.write("\n")

@command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
    try:
        ui.write(r.revision(r.lookup(rev), raw=True))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)

@command('debugdate',
    [('e', 'extended', None, _('try extended date formats'))],
    _('[-e] DATE [RANGE]'),
    norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        d = util.parsedate(date, util.extendeddateformats)
    else:
        d = util.parsedate(date)
    ui.write(("internal: %s %s\n") % d)
    ui.write(("standard: %s\n") % util.datestr(d))
    if range:
        m = util.matchdate(range)
        ui.write(("match: %s\n") % m(d[0]))

@command('debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain
    """
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    generaldelta = r.version & revlog.FLAG_GENERALDELTA

    def revinfo(rev):
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            if e[3] == e[5]:
                deltatype = 'p1'
            elif e[3] == e[6]:
                deltatype = 'p2'
            elif e[3] == rev - 1:
                deltatype = 'prev'
            elif e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            if e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    fm.plain(' rev chain# chainlen prev delta '
             'size rawsize chainsize ratio lindist extradist '
             'extraratio\n')

    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = r.start(chainbase)
        revstart = r.start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        chainratio = float(chainsize) / float(uncomp)
        extraratio = float(extradist) / float(chainsize)

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f\n',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)

    fm.end()

@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime')),
    ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = opts.get('nodates')
    datesort = opts.get('datesort')

    timestr = ""
    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        if ent[3] == -1:
            timestr = 'unset '
        elif nodates:
            timestr = 'set '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
                                    time.localtime(ent[3]))
        if ent[1] & 0o20000:
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))

@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
    ('', 'nonheads', None,
     _('use old-style discovery with non-heads included')),
    ] + cmdutil.remoteopts,
    _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
                                      opts.get('branch'))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(localheads, remoteheads, remote=remote):
        if opts.get('old'):
            if localheads:
                raise error.Abort('cannot use localheads with old style '
                                  'discovery')
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))
                dag = dagutil.revlogdag(repo.changelog)
                all = dag.ancestorset(dag.internalizeall(common))
                common = dag.externalizeall(dag.headsetofconnecteds(all))
        else:
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
        common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
        if lheads <= common:
            ui.write(("local is subset\n"))
        elif rheads <= common:
            ui.write(("remote is subset\n"))

    serverlogs = opts.get('serverlog')
    if serverlogs:
        for filename in serverlogs:
            with open(filename, 'r') as logfile:
                line = logfile.readline()
                while line:
                    parts = line.strip().split(';')
                    op = parts[1]
                    if op == 'cg':
                        pass
                    elif op == 'cgss':
                        doit(parts[2].split(' '), parts[3].split(' '))
                    elif op == 'unb':
                        doit(parts[3].split(' '), parts[2].split(' '))
                    line = logfile.readline()
    else:
        remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
                                                 opts.get('remote_head'))
        localrevs = opts.get('local_head')
        doit(localrevs, remoterevs)

@command('debugextensions', cmdutil.formatteropts, [], norepo=True)
def debugextensions(ui, **opts):
    '''show information about active extensions'''
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter('debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = pycompat.fsencode(extmod.__file__)
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', '').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write('name', '%s\n', extname)
        else:
            fm.write('name', '%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain('\n')
            elif not exttestedwith:
                fm.plain(_(' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(' (%s!)\n' % lasttestedversion)

        fm.condwrite(ui.verbose and extsource, 'source',
                     _(' location: %s\n'), extsource or "")

        if ui.verbose:
            fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
                     _(' tested with: %s\n'),
                     fm.formatlist(exttestedwith, name='ver'))

        fm.condwrite(ui.verbose and extbuglink, 'buglink',
                     _(' bug reporting: %s\n'), extbuglink or "")

    fm.end()

@command('debugfileset',
    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
    _('[-r REV] FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)
    if ui.verbose:
        tree = fileset.parse(expr)
        ui.note(fileset.prettyformat(tree), "\n")

    for f in ctx.getfileset(expr):
        ui.write("%s\n" % f)

@command('debugfsinfo', [], _('[PATH]'), norepo=True)
def debugfsinfo(ui, path="."):
    """show information detected about current filesystem"""
    ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
    ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
    ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
    ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
    casesensitive = '(unknown)'
    try:
        with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
            casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
    except OSError:
        pass
    ui.write(('case-sensitive: %s\n') % casesensitive)

@command('debuggetbundle',
    [('H', 'head', [], _('id of head node'), _('ID')),
    ('C', 'common', [], _('id of common node'), _('ID')),
    ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
    _('REPO FILE [-H|-C ID]...'),
    norepo=True)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('getbundle'):
        raise error.Abort("getbundle() not supported by target repository")
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = repo.getbundle('debug', **args)

    bundletype = opts.get('type', 'bzip2').lower()
    btypes = {'none': 'HG10UN',
              'bzip2': 'HG10BZ',
              'gzip': 'HG10GZ',
              'bundle2': 'HG20'}
    bundletype = btypes.get(bundletype)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_('unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)

@command('debugignore', [], '[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write("%s\n" % repr(ignore))
    else:
        for f in files:
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != '.':
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    for p in util.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_("%s is ignored\n") % f)
                else:
                    ui.write(_("%s is ignored because of "
                               "containing folder %s\n")
                             % (f, ignored))
                ignorefile, lineno, line = ignoredata
                ui.write(_("(ignore rule in %s, line %d: '%s')\n")
                         % (ignorefile, lineno, line))
            else:
                ui.write(_("%s is not ignored\n") % f)

@command('debugindex', cmdutil.debugrevlogopts +
    [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
    _('[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True)
def debugindex(ui, repo, file_=None, **opts):
    """dump the contents of an index file"""
    r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise error.Abort(_("unknown format %d") % format)

    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    if generaldelta:
        basehdr = ' delta'
    else:
        basehdr = ' base'

    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    if format == 0:
        ui.write((" rev offset length " + basehdr + " linkrev"
                  " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
    elif format == 1:
        ui.write((" rev flag offset length"
                  " size " + basehdr + " link p1 p2"
                  " %s\n") % "nodeid".rjust(idlen))

    for i in r:
        node = r.node(i)
        if generaldelta:
            base = r.deltaparent(i)
        else:
            base = r.chainbase(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                pp = [nullid, nullid]
            ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                i, r.start(i), r.length(i), base, r.linkrev(i),
                shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
                i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                base, r.linkrev(i), pr[0], pr[1], shortfn(node)))

@command('debugindexdot', cmdutil.debugrevlogopts,
    _('-c|-m|FILE'), optionalrepo=True)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
    ui.write(("digraph G {\n"))
    for i in r:
        node = r.node(i)
467 branches = opts.get('branches')
467 if tags:
468 if tags:
468 labels = {}
469 labels = {}
469 for l, n in repo.tags().items():
470 for l, n in repo.tags().items():
470 labels.setdefault(cl.rev(n), []).append(l)
471 labels.setdefault(cl.rev(n), []).append(l)
471 def events():
472 def events():
472 b = "default"
473 b = "default"
473 for r in cl:
474 for r in cl:
474 if branches:
475 if branches:
475 newb = cl.read(cl.node(r))[5]['branch']
476 newb = cl.read(cl.node(r))[5]['branch']
476 if newb != b:
477 if newb != b:
477 yield 'a', newb
478 yield 'a', newb
478 b = newb
479 b = newb
479 yield 'n', (r, list(p for p in cl.parentrevs(r)
480 yield 'n', (r, list(p for p in cl.parentrevs(r)
480 if p != -1))
481 if p != -1))
481 if tags:
482 if tags:
482 ls = labels.get(r)
483 ls = labels.get(r)
483 if ls:
484 if ls:
484 for l in ls:
485 for l in ls:
485 yield 'l', (r, l)
486 yield 'l', (r, l)
486 else:
487 else:
487 raise error.Abort(_('need repo for changelog dag'))
488 raise error.Abort(_('need repo for changelog dag'))
488
489
489 for line in dagparser.dagtextlines(events(),
490 for line in dagparser.dagtextlines(events(),
490 addspaces=spaces,
491 addspaces=spaces,
491 wraplabels=True,
492 wraplabels=True,
492 wrapannotations=True,
493 wrapannotations=True,
493 wrapnonlinear=dots,
494 wrapnonlinear=dots,
494 usedots=dots,
495 usedots=dots,
495 maxlinewidth=70):
496 maxlinewidth=70):
496 ui.write(line)
497 ui.write(line)
497 ui.write("\n")
498 ui.write("\n")
498
499
499 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
500 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
500 def debugdata(ui, repo, file_, rev=None, **opts):
501 def debugdata(ui, repo, file_, rev=None, **opts):
501 """dump the contents of a data file revision"""
502 """dump the contents of a data file revision"""
502 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
503 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
503 if rev is not None:
504 if rev is not None:
504 raise error.CommandError('debugdata', _('invalid arguments'))
505 raise error.CommandError('debugdata', _('invalid arguments'))
505 file_, rev = None, file_
506 file_, rev = None, file_
506 elif rev is None:
507 elif rev is None:
507 raise error.CommandError('debugdata', _('invalid arguments'))
508 raise error.CommandError('debugdata', _('invalid arguments'))
508 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
509 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
509 try:
510 try:
510 ui.write(r.revision(r.lookup(rev), raw=True))
511 ui.write(r.revision(r.lookup(rev), raw=True))
511 except KeyError:
512 except KeyError:
512 raise error.Abort(_('invalid revision identifier %s') % rev)
513 raise error.Abort(_('invalid revision identifier %s') % rev)
513
514
514 @command('debugdate',
515 @command('debugdate',
515 [('e', 'extended', None, _('try extended date formats'))],
516 [('e', 'extended', None, _('try extended date formats'))],
516 _('[-e] DATE [RANGE]'),
517 _('[-e] DATE [RANGE]'),
517 norepo=True, optionalrepo=True)
518 norepo=True, optionalrepo=True)
518 def debugdate(ui, date, range=None, **opts):
519 def debugdate(ui, date, range=None, **opts):
519 """parse and display a date"""
520 """parse and display a date"""
520 if opts["extended"]:
521 if opts["extended"]:
521 d = util.parsedate(date, util.extendeddateformats)
522 d = util.parsedate(date, util.extendeddateformats)
522 else:
523 else:
523 d = util.parsedate(date)
524 d = util.parsedate(date)
524 ui.write(("internal: %s %s\n") % d)
525 ui.write(("internal: %s %s\n") % d)
525 ui.write(("standard: %s\n") % util.datestr(d))
526 ui.write(("standard: %s\n") % util.datestr(d))
526 if range:
527 if range:
527 m = util.matchdate(range)
528 m = util.matchdate(range)
528 ui.write(("match: %s\n") % m(d[0]))
529 ui.write(("match: %s\n") % m(d[0]))
529
530
530 @command('debugdeltachain',
531 @command('debugdeltachain',
531 cmdutil.debugrevlogopts + cmdutil.formatteropts,
532 cmdutil.debugrevlogopts + cmdutil.formatteropts,
532 _('-c|-m|FILE'),
533 _('-c|-m|FILE'),
533 optionalrepo=True)
534 optionalrepo=True)
534 def debugdeltachain(ui, repo, file_=None, **opts):
535 def debugdeltachain(ui, repo, file_=None, **opts):
535 """dump information about delta chains in a revlog
536 """dump information about delta chains in a revlog
536
537
537 Output can be templatized. Available template keywords are:
538 Output can be templatized. Available template keywords are:
538
539
539 :``rev``: revision number
540 :``rev``: revision number
540 :``chainid``: delta chain identifier (numbered by unique base)
541 :``chainid``: delta chain identifier (numbered by unique base)
541 :``chainlen``: delta chain length to this revision
542 :``chainlen``: delta chain length to this revision
542 :``prevrev``: previous revision in delta chain
543 :``prevrev``: previous revision in delta chain
543 :``deltatype``: role of delta / how it was computed
544 :``deltatype``: role of delta / how it was computed
544 :``compsize``: compressed size of revision
545 :``compsize``: compressed size of revision
545 :``uncompsize``: uncompressed size of revision
546 :``uncompsize``: uncompressed size of revision
546 :``chainsize``: total size of compressed revisions in chain
547 :``chainsize``: total size of compressed revisions in chain
547 :``chainratio``: total chain size divided by uncompressed revision size
548 :``chainratio``: total chain size divided by uncompressed revision size
548 (new delta chains typically start at ratio 2.00)
549 (new delta chains typically start at ratio 2.00)
549 :``lindist``: linear distance from base revision in delta chain to end
550 :``lindist``: linear distance from base revision in delta chain to end
550 of this revision
551 of this revision
551 :``extradist``: total size of revisions not part of this delta chain from
552 :``extradist``: total size of revisions not part of this delta chain from
552 base of delta chain to end of this revision; a measurement
553 base of delta chain to end of this revision; a measurement
553 of how much extra data we need to read/seek across to read
554 of how much extra data we need to read/seek across to read
554 the delta chain for this revision
555 the delta chain for this revision
555 :``extraratio``: extradist divided by chainsize; another representation of
556 :``extraratio``: extradist divided by chainsize; another representation of
556 how much unrelated data is needed to load this delta chain
557 how much unrelated data is needed to load this delta chain
557 """
558 """
558 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
559 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
559 index = r.index
560 index = r.index
560 generaldelta = r.version & revlog.FLAG_GENERALDELTA
561 generaldelta = r.version & revlog.FLAG_GENERALDELTA
561
562
562 def revinfo(rev):
563 def revinfo(rev):
563 e = index[rev]
564 e = index[rev]
564 compsize = e[1]
565 compsize = e[1]
565 uncompsize = e[2]
566 uncompsize = e[2]
566 chainsize = 0
567 chainsize = 0
567
568
568 if generaldelta:
569 if generaldelta:
569 if e[3] == e[5]:
570 if e[3] == e[5]:
570 deltatype = 'p1'
571 deltatype = 'p1'
571 elif e[3] == e[6]:
572 elif e[3] == e[6]:
572 deltatype = 'p2'
573 deltatype = 'p2'
573 elif e[3] == rev - 1:
574 elif e[3] == rev - 1:
574 deltatype = 'prev'
575 deltatype = 'prev'
575 elif e[3] == rev:
576 elif e[3] == rev:
576 deltatype = 'base'
577 deltatype = 'base'
577 else:
578 else:
578 deltatype = 'other'
579 deltatype = 'other'
579 else:
580 else:
580 if e[3] == rev:
581 if e[3] == rev:
581 deltatype = 'base'
582 deltatype = 'base'
582 else:
583 else:
583 deltatype = 'prev'
584 deltatype = 'prev'
584
585
585 chain = r._deltachain(rev)[0]
586 chain = r._deltachain(rev)[0]
586 for iterrev in chain:
587 for iterrev in chain:
587 e = index[iterrev]
588 e = index[iterrev]
588 chainsize += e[1]
589 chainsize += e[1]
589
590
590 return compsize, uncompsize, deltatype, chain, chainsize
591 return compsize, uncompsize, deltatype, chain, chainsize
591
592
592 fm = ui.formatter('debugdeltachain', opts)
593 fm = ui.formatter('debugdeltachain', opts)
593
594
594 fm.plain(' rev chain# chainlen prev delta '
595 fm.plain(' rev chain# chainlen prev delta '
595 'size rawsize chainsize ratio lindist extradist '
596 'size rawsize chainsize ratio lindist extradist '
596 'extraratio\n')
597 'extraratio\n')
597
598
598 chainbases = {}
599 chainbases = {}
599 for rev in r:
600 for rev in r:
600 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
601 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
601 chainbase = chain[0]
602 chainbase = chain[0]
602 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
603 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
603 basestart = r.start(chainbase)
604 basestart = r.start(chainbase)
604 revstart = r.start(rev)
605 revstart = r.start(rev)
605 lineardist = revstart + comp - basestart
606 lineardist = revstart + comp - basestart
606 extradist = lineardist - chainsize
607 extradist = lineardist - chainsize
607 try:
608 try:
608 prevrev = chain[-2]
609 prevrev = chain[-2]
609 except IndexError:
610 except IndexError:
610 prevrev = -1
611 prevrev = -1
611
612
612 chainratio = float(chainsize) / float(uncomp)
613 chainratio = float(chainsize) / float(uncomp)
613 extraratio = float(extradist) / float(chainsize)
614 extraratio = float(extradist) / float(chainsize)
614
615
615 fm.startitem()
616 fm.startitem()
616 fm.write('rev chainid chainlen prevrev deltatype compsize '
617 fm.write('rev chainid chainlen prevrev deltatype compsize '
617 'uncompsize chainsize chainratio lindist extradist '
618 'uncompsize chainsize chainratio lindist extradist '
618 'extraratio',
619 'extraratio',
619 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f\n',
620 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f\n',
620 rev, chainid, len(chain), prevrev, deltatype, comp,
621 rev, chainid, len(chain), prevrev, deltatype, comp,
621 uncomp, chainsize, chainratio, lineardist, extradist,
622 uncomp, chainsize, chainratio, lineardist, extradist,
622 extraratio,
623 extraratio,
623 rev=rev, chainid=chainid, chainlen=len(chain),
624 rev=rev, chainid=chainid, chainlen=len(chain),
624 prevrev=prevrev, deltatype=deltatype, compsize=comp,
625 prevrev=prevrev, deltatype=deltatype, compsize=comp,
625 uncompsize=uncomp, chainsize=chainsize,
626 uncompsize=uncomp, chainsize=chainsize,
626 chainratio=chainratio, lindist=lineardist,
627 chainratio=chainratio, lindist=lineardist,
627 extradist=extradist, extraratio=extraratio)
628 extradist=extradist, extraratio=extraratio)
628
629
629 fm.end()
630 fm.end()
630
631
631 @command('debugdirstate|debugstate',
632 @command('debugdirstate|debugstate',
632 [('', 'nodates', None, _('do not display the saved mtime')),
633 [('', 'nodates', None, _('do not display the saved mtime')),
633 ('', 'datesort', None, _('sort by saved mtime'))],
634 ('', 'datesort', None, _('sort by saved mtime'))],
634 _('[OPTION]...'))
635 _('[OPTION]...'))
635 def debugstate(ui, repo, **opts):
636 def debugstate(ui, repo, **opts):
636 """show the contents of the current dirstate"""
637 """show the contents of the current dirstate"""
637
638
638 nodates = opts.get('nodates')
639 nodates = opts.get('nodates')
639 datesort = opts.get('datesort')
640 datesort = opts.get('datesort')
640
641
641 timestr = ""
642 timestr = ""
642 if datesort:
643 if datesort:
643 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
644 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
644 else:
645 else:
645 keyfunc = None # sort by filename
646 keyfunc = None # sort by filename
646 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
647 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
647 if ent[3] == -1:
648 if ent[3] == -1:
648 timestr = 'unset '
649 timestr = 'unset '
649 elif nodates:
650 elif nodates:
650 timestr = 'set '
651 timestr = 'set '
651 else:
652 else:
652 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
653 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
653 time.localtime(ent[3]))
654 time.localtime(ent[3]))
654 if ent[1] & 0o20000:
655 if ent[1] & 0o20000:
655 mode = 'lnk'
656 mode = 'lnk'
656 else:
657 else:
657 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
658 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
658 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
659 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
659 for f in repo.dirstate.copies():
660 for f in repo.dirstate.copies():
660 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
661 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
661
662
662 @command('debugdiscovery',
663 @command('debugdiscovery',
663 [('', 'old', None, _('use old-style discovery')),
664 [('', 'old', None, _('use old-style discovery')),
664 ('', 'nonheads', None,
665 ('', 'nonheads', None,
665 _('use old-style discovery with non-heads included')),
666 _('use old-style discovery with non-heads included')),
666 ] + cmdutil.remoteopts,
667 ] + cmdutil.remoteopts,
667 _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
668 _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
668 def debugdiscovery(ui, repo, remoteurl="default", **opts):
669 def debugdiscovery(ui, repo, remoteurl="default", **opts):
669 """runs the changeset discovery protocol in isolation"""
670 """runs the changeset discovery protocol in isolation"""
670 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
671 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
671 opts.get('branch'))
672 opts.get('branch'))
672 remote = hg.peer(repo, opts, remoteurl)
673 remote = hg.peer(repo, opts, remoteurl)
673 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
674 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
674
675
675 # make sure tests are repeatable
676 # make sure tests are repeatable
676 random.seed(12323)
677 random.seed(12323)
677
678
678 def doit(localheads, remoteheads, remote=remote):
679 def doit(localheads, remoteheads, remote=remote):
679 if opts.get('old'):
680 if opts.get('old'):
680 if localheads:
681 if localheads:
681 raise error.Abort('cannot use localheads with old style '
682 raise error.Abort('cannot use localheads with old style '
682 'discovery')
683 'discovery')
683 if not util.safehasattr(remote, 'branches'):
684 if not util.safehasattr(remote, 'branches'):
684 # enable in-client legacy support
685 # enable in-client legacy support
685 remote = localrepo.locallegacypeer(remote.local())
686 remote = localrepo.locallegacypeer(remote.local())
686 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
687 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
687 force=True)
688 force=True)
688 common = set(common)
689 common = set(common)
689 if not opts.get('nonheads'):
690 if not opts.get('nonheads'):
690 ui.write(("unpruned common: %s\n") %
691 ui.write(("unpruned common: %s\n") %
691 " ".join(sorted(short(n) for n in common)))
692 " ".join(sorted(short(n) for n in common)))
692 dag = dagutil.revlogdag(repo.changelog)
693 dag = dagutil.revlogdag(repo.changelog)
693 all = dag.ancestorset(dag.internalizeall(common))
694 all = dag.ancestorset(dag.internalizeall(common))
694 common = dag.externalizeall(dag.headsetofconnecteds(all))
695 common = dag.externalizeall(dag.headsetofconnecteds(all))
695 else:
696 else:
696 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
697 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
697 common = set(common)
698 common = set(common)
698 rheads = set(hds)
699 rheads = set(hds)
699 lheads = set(repo.heads())
700 lheads = set(repo.heads())
700 ui.write(("common heads: %s\n") %
701 ui.write(("common heads: %s\n") %
701 " ".join(sorted(short(n) for n in common)))
702 " ".join(sorted(short(n) for n in common)))
702 if lheads <= common:
703 if lheads <= common:
703 ui.write(("local is subset\n"))
704 ui.write(("local is subset\n"))
704 elif rheads <= common:
705 elif rheads <= common:
705 ui.write(("remote is subset\n"))
706 ui.write(("remote is subset\n"))
706
707
707 serverlogs = opts.get('serverlog')
708 serverlogs = opts.get('serverlog')
708 if serverlogs:
709 if serverlogs:
709 for filename in serverlogs:
710 for filename in serverlogs:
710 with open(filename, 'r') as logfile:
711 with open(filename, 'r') as logfile:
711 line = logfile.readline()
712 line = logfile.readline()
712 while line:
713 while line:
713 parts = line.strip().split(';')
714 parts = line.strip().split(';')
714 op = parts[1]
715 op = parts[1]
715 if op == 'cg':
716 if op == 'cg':
716 pass
717 pass
717 elif op == 'cgss':
718 elif op == 'cgss':
718 doit(parts[2].split(' '), parts[3].split(' '))
719 doit(parts[2].split(' '), parts[3].split(' '))
719 elif op == 'unb':
720 elif op == 'unb':
720 doit(parts[3].split(' '), parts[2].split(' '))
721 doit(parts[3].split(' '), parts[2].split(' '))
721 line = logfile.readline()
722 line = logfile.readline()
722 else:
723 else:
723 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
724 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
724 opts.get('remote_head'))
725 opts.get('remote_head'))
725 localrevs = opts.get('local_head')
726 localrevs = opts.get('local_head')
726 doit(localrevs, remoterevs)
727 doit(localrevs, remoterevs)
727
728
728 @command('debugextensions', cmdutil.formatteropts, [], norepo=True)
729 @command('debugextensions', cmdutil.formatteropts, [], norepo=True)
729 def debugextensions(ui, **opts):
730 def debugextensions(ui, **opts):
730 '''show information about active extensions'''
731 '''show information about active extensions'''
731 exts = extensions.extensions(ui)
732 exts = extensions.extensions(ui)
732 hgver = util.version()
733 hgver = util.version()
733 fm = ui.formatter('debugextensions', opts)
734 fm = ui.formatter('debugextensions', opts)
734 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
735 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
735 isinternal = extensions.ismoduleinternal(extmod)
736 isinternal = extensions.ismoduleinternal(extmod)
736 extsource = pycompat.fsencode(extmod.__file__)
737 extsource = pycompat.fsencode(extmod.__file__)
737 if isinternal:
738 if isinternal:
738 exttestedwith = [] # never expose magic string to users
739 exttestedwith = [] # never expose magic string to users
739 else:
740 else:
740 exttestedwith = getattr(extmod, 'testedwith', '').split()
741 exttestedwith = getattr(extmod, 'testedwith', '').split()
741 extbuglink = getattr(extmod, 'buglink', None)
742 extbuglink = getattr(extmod, 'buglink', None)
742
743
743 fm.startitem()
744 fm.startitem()
744
745
745 if ui.quiet or ui.verbose:
746 if ui.quiet or ui.verbose:
746 fm.write('name', '%s\n', extname)
747 fm.write('name', '%s\n', extname)
747 else:
748 else:
748 fm.write('name', '%s', extname)
749 fm.write('name', '%s', extname)
749 if isinternal or hgver in exttestedwith:
750 if isinternal or hgver in exttestedwith:
750 fm.plain('\n')
751 fm.plain('\n')
751 elif not exttestedwith:
752 elif not exttestedwith:
752 fm.plain(_(' (untested!)\n'))
753 fm.plain(_(' (untested!)\n'))
753 else:
754 else:
754 lasttestedversion = exttestedwith[-1]
755 lasttestedversion = exttestedwith[-1]
755 fm.plain(' (%s!)\n' % lasttestedversion)
756 fm.plain(' (%s!)\n' % lasttestedversion)
756
757
757 fm.condwrite(ui.verbose and extsource, 'source',
758 fm.condwrite(ui.verbose and extsource, 'source',
758 _(' location: %s\n'), extsource or "")
759 _(' location: %s\n'), extsource or "")
759
760
760 if ui.verbose:
761 if ui.verbose:
761 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
762 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
762 fm.data(bundled=isinternal)
763 fm.data(bundled=isinternal)
763
764
764 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
765 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
765 _(' tested with: %s\n'),
766 _(' tested with: %s\n'),
766 fm.formatlist(exttestedwith, name='ver'))
767 fm.formatlist(exttestedwith, name='ver'))
767
768
768 fm.condwrite(ui.verbose and extbuglink, 'buglink',
769 fm.condwrite(ui.verbose and extbuglink, 'buglink',
769 _(' bug reporting: %s\n'), extbuglink or "")
770 _(' bug reporting: %s\n'), extbuglink or "")
770
771
771 fm.end()
772 fm.end()
772
773
773 @command('debugfileset',
774 @command('debugfileset',
774 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
775 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
775 _('[-r REV] FILESPEC'))
776 _('[-r REV] FILESPEC'))
776 def debugfileset(ui, repo, expr, **opts):
777 def debugfileset(ui, repo, expr, **opts):
777 '''parse and apply a fileset specification'''
778 '''parse and apply a fileset specification'''
778 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
779 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
779 if ui.verbose:
780 if ui.verbose:
780 tree = fileset.parse(expr)
781 tree = fileset.parse(expr)
781 ui.note(fileset.prettyformat(tree), "\n")
782 ui.note(fileset.prettyformat(tree), "\n")
782
783
783 for f in ctx.getfileset(expr):
784 for f in ctx.getfileset(expr):
784 ui.write("%s\n" % f)
785 ui.write("%s\n" % f)
785
786
786 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
787 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
787 def debugfsinfo(ui, path="."):
788 def debugfsinfo(ui, path="."):
788 """show information detected about current filesystem"""
789 """show information detected about current filesystem"""
789 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
790 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
790 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
791 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
791 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
792 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
792 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
793 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
793 casesensitive = '(unknown)'
794 casesensitive = '(unknown)'
794 try:
795 try:
795 with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
796 with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
796 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
797 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
797 except OSError:
798 except OSError:
798 pass
799 pass
799 ui.write(('case-sensitive: %s\n') % casesensitive)
800 ui.write(('case-sensitive: %s\n') % casesensitive)
800
801
801 @command('debuggetbundle',
802 @command('debuggetbundle',
802 [('H', 'head', [], _('id of head node'), _('ID')),
803 [('H', 'head', [], _('id of head node'), _('ID')),
803 ('C', 'common', [], _('id of common node'), _('ID')),
804 ('C', 'common', [], _('id of common node'), _('ID')),
804 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
805 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
805 _('REPO FILE [-H|-C ID]...'),
806 _('REPO FILE [-H|-C ID]...'),
806 norepo=True)
807 norepo=True)
807 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
808 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
808 """retrieves a bundle from a repo
809 """retrieves a bundle from a repo
809
810
810 Every ID must be a full-length hex node id string. Saves the bundle to the
811 Every ID must be a full-length hex node id string. Saves the bundle to the
811 given file.
812 given file.
812 """
813 """
813 repo = hg.peer(ui, opts, repopath)
814 repo = hg.peer(ui, opts, repopath)
814 if not repo.capable('getbundle'):
815 if not repo.capable('getbundle'):
815 raise error.Abort("getbundle() not supported by target repository")
816 raise error.Abort("getbundle() not supported by target repository")
816 args = {}
817 args = {}
817 if common:
818 if common:
818 args['common'] = [bin(s) for s in common]
819 args['common'] = [bin(s) for s in common]
819 if head:
820 if head:
820 args['heads'] = [bin(s) for s in head]
821 args['heads'] = [bin(s) for s in head]
821 # TODO: get desired bundlecaps from command line.
822 # TODO: get desired bundlecaps from command line.
822 args['bundlecaps'] = None
823 args['bundlecaps'] = None
823 bundle = repo.getbundle('debug', **args)
824 bundle = repo.getbundle('debug', **args)
824
825
825 bundletype = opts.get('type', 'bzip2').lower()
826 bundletype = opts.get('type', 'bzip2').lower()
826 btypes = {'none': 'HG10UN',
827 btypes = {'none': 'HG10UN',
827 'bzip2': 'HG10BZ',
828 'bzip2': 'HG10BZ',
828 'gzip': 'HG10GZ',
829 'gzip': 'HG10GZ',
829 'bundle2': 'HG20'}
830 'bundle2': 'HG20'}
830 bundletype = btypes.get(bundletype)
831 bundletype = btypes.get(bundletype)
831 if bundletype not in bundle2.bundletypes:
832 if bundletype not in bundle2.bundletypes:
832 raise error.Abort(_('unknown bundle type specified with --type'))
833 raise error.Abort(_('unknown bundle type specified with --type'))
833 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
834 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
834
835
835 @command('debugignore', [], '[FILE]')
836 @command('debugignore', [], '[FILE]')
836 def debugignore(ui, repo, *files, **opts):
837 def debugignore(ui, repo, *files, **opts):
837 """display the combined ignore pattern and information about ignored files
838 """display the combined ignore pattern and information about ignored files
838
839
839 With no argument display the combined ignore pattern.
840 With no argument display the combined ignore pattern.
840
841
841 Given space separated file names, shows if the given file is ignored and
842 Given space separated file names, shows if the given file is ignored and
842 if so, show the ignore rule (file and line number) that matched it.
843 if so, show the ignore rule (file and line number) that matched it.
843 """
844 """
844 ignore = repo.dirstate._ignore
845 ignore = repo.dirstate._ignore
845 if not files:
846 if not files:
846 # Show all the patterns
847 # Show all the patterns
847 ui.write("%s\n" % repr(ignore))
848 ui.write("%s\n" % repr(ignore))
848 else:
849 else:
849 for f in files:
850 for f in files:
850 nf = util.normpath(f)
851 nf = util.normpath(f)
851 ignored = None
852 ignored = None
852 ignoredata = None
853 ignoredata = None
853 if nf != '.':
854 if nf != '.':
854 if ignore(nf):
855 if ignore(nf):
855 ignored = nf
856 ignored = nf
856 ignoredata = repo.dirstate._ignorefileandline(nf)
857 ignoredata = repo.dirstate._ignorefileandline(nf)
857 else:
858 else:
858 for p in util.finddirs(nf):
859 for p in util.finddirs(nf):
859 if ignore(p):
860 if ignore(p):
860 ignored = p
861 ignored = p
861 ignoredata = repo.dirstate._ignorefileandline(p)
862 ignoredata = repo.dirstate._ignorefileandline(p)
862 break
863 break
863 if ignored:
864 if ignored:
864 if ignored == nf:
865 if ignored == nf:
865 ui.write(_("%s is ignored\n") % f)
866 ui.write(_("%s is ignored\n") % f)
866 else:
867 else:
867 ui.write(_("%s is ignored because of "
868 ui.write(_("%s is ignored because of "
868 "containing folder %s\n")
869 "containing folder %s\n")
869 % (f, ignored))
870 % (f, ignored))
870 ignorefile, lineno, line = ignoredata
871 ignorefile, lineno, line = ignoredata
871 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
872 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
872 % (ignorefile, lineno, line))
873 % (ignorefile, lineno, line))
873 else:
874 else:
874 ui.write(_("%s is not ignored\n") % f)
875 ui.write(_("%s is not ignored\n") % f)
875
876
876 @command('debugindex', cmdutil.debugrevlogopts +
877 @command('debugindex', cmdutil.debugrevlogopts +
877 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
878 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
878 _('[-f FORMAT] -c|-m|FILE'),
879 _('[-f FORMAT] -c|-m|FILE'),
879 optionalrepo=True)
880 optionalrepo=True)
880 def debugindex(ui, repo, file_=None, **opts):
881 def debugindex(ui, repo, file_=None, **opts):
881 """dump the contents of an index file"""
882 """dump the contents of an index file"""
882 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
883 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
883 format = opts.get('format', 0)
884 format = opts.get('format', 0)
884 if format not in (0, 1):
885 if format not in (0, 1):
885 raise error.Abort(_("unknown format %d") % format)
886 raise error.Abort(_("unknown format %d") % format)
886
887
887 generaldelta = r.version & revlog.FLAG_GENERALDELTA
888 generaldelta = r.version & revlog.FLAG_GENERALDELTA
888 if generaldelta:
889 if generaldelta:
889 basehdr = ' delta'
890 basehdr = ' delta'
890 else:
891 else:
891 basehdr = ' base'
892 basehdr = ' base'
892
893
893 if ui.debugflag:
894 if ui.debugflag:
894 shortfn = hex
895 shortfn = hex
895 else:
896 else:
896 shortfn = short
897 shortfn = short
897
898
898 # There might not be anything in r, so have a sane default
899 # There might not be anything in r, so have a sane default
899 idlen = 12
900 idlen = 12
900 for i in r:
901 for i in r:
901 idlen = len(shortfn(r.node(i)))
902 idlen = len(shortfn(r.node(i)))
902 break
903 break
903
904
904 if format == 0:
905 if format == 0:
905 ui.write((" rev offset length " + basehdr + " linkrev"
906 ui.write((" rev offset length " + basehdr + " linkrev"
906 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
907 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
907 elif format == 1:
908 elif format == 1:
908 ui.write((" rev flag offset length"
909 ui.write((" rev flag offset length"
909 " size " + basehdr + " link p1 p2"
910 " size " + basehdr + " link p1 p2"
910 " %s\n") % "nodeid".rjust(idlen))
911 " %s\n") % "nodeid".rjust(idlen))
911
912
912 for i in r:
913 for i in r:
913 node = r.node(i)
914 node = r.node(i)
914 if generaldelta:
915 if generaldelta:
915 base = r.deltaparent(i)
916 base = r.deltaparent(i)
916 else:
917 else:
917 base = r.chainbase(i)
918 base = r.chainbase(i)
918 if format == 0:
919 if format == 0:
919 try:
920 try:
920 pp = r.parents(node)
921 pp = r.parents(node)
921 except Exception:
922 except Exception:
922 pp = [nullid, nullid]
923 pp = [nullid, nullid]
923 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
924 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
924 i, r.start(i), r.length(i), base, r.linkrev(i),
925 i, r.start(i), r.length(i), base, r.linkrev(i),
925 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
926 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
926 elif format == 1:
927 elif format == 1:
927 pr = r.parentrevs(i)
928 pr = r.parentrevs(i)
928 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
929 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
929 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
930 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
930 base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
931 base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
931
932
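# Illustrative usage of debugindex (a sketch added for clarity; the file name is
# hypothetical, the flags come from the option table above):
#
#   $ hg debugindex -c               # dump the changelog index (format 0)
#   $ hg debugindex -m -f 1          # dump the manifest index using format 1
#   $ hg debugindex path/to/file     # dump the filelog index of a tracked file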
932 @command('debugindexdot', cmdutil.debugrevlogopts,
933 @command('debugindexdot', cmdutil.debugrevlogopts,
933 _('-c|-m|FILE'), optionalrepo=True)
934 _('-c|-m|FILE'), optionalrepo=True)
934 def debugindexdot(ui, repo, file_=None, **opts):
935 def debugindexdot(ui, repo, file_=None, **opts):
935 """dump an index DAG as a graphviz dot file"""
936 """dump an index DAG as a graphviz dot file"""
936 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
937 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
937 ui.write(("digraph G {\n"))
938 ui.write(("digraph G {\n"))
938 for i in r:
939 for i in r:
939 node = r.node(i)
940 node = r.node(i)
940 pp = r.parents(node)
941 pp = r.parents(node)
941 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
942 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
942 if pp[1] != nullid:
943 if pp[1] != nullid:
943 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
944 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
944 ui.write("}\n")
945 ui.write("}\n")
945
946
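# Illustrative usage of debugindexdot (a sketch added for clarity; the graphviz
# pipeline is an assumption, not part of this module):
#
#   $ hg debugindexdot -c | dot -Tpng > dag.png
#
# The emitted text is a plain "digraph G { ... }" with one "parent -> child"
# edge per parent, exactly as written by the loop above.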
946 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
947 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
947 def debuginstall(ui, **opts):
948 def debuginstall(ui, **opts):
948 '''test Mercurial installation
949 '''test Mercurial installation
949
950
950 Returns 0 on success.
951 Returns 0 on success.
951 '''
952 '''
952
953
953 def writetemp(contents):
954 def writetemp(contents):
954 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
955 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
955 f = os.fdopen(fd, pycompat.sysstr("wb"))
956 f = os.fdopen(fd, pycompat.sysstr("wb"))
956 f.write(contents)
957 f.write(contents)
957 f.close()
958 f.close()
958 return name
959 return name
959
960
960 problems = 0
961 problems = 0
961
962
962 fm = ui.formatter('debuginstall', opts)
963 fm = ui.formatter('debuginstall', opts)
963 fm.startitem()
964 fm.startitem()
964
965
965 # encoding
966 # encoding
966 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
967 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
967 err = None
968 err = None
968 try:
969 try:
969 encoding.fromlocal("test")
970 encoding.fromlocal("test")
970 except error.Abort as inst:
971 except error.Abort as inst:
971 err = inst
972 err = inst
972 problems += 1
973 problems += 1
973 fm.condwrite(err, 'encodingerror', _(" %s\n"
974 fm.condwrite(err, 'encodingerror', _(" %s\n"
974 " (check that your locale is properly set)\n"), err)
975 " (check that your locale is properly set)\n"), err)
975
976
976 # Python
977 # Python
977 fm.write('pythonexe', _("checking Python executable (%s)\n"),
978 fm.write('pythonexe', _("checking Python executable (%s)\n"),
978 pycompat.sysexecutable)
979 pycompat.sysexecutable)
979 fm.write('pythonver', _("checking Python version (%s)\n"),
980 fm.write('pythonver', _("checking Python version (%s)\n"),
980 ("%d.%d.%d" % sys.version_info[:3]))
981 ("%d.%d.%d" % sys.version_info[:3]))
981 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
982 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
982 os.path.dirname(pycompat.fsencode(os.__file__)))
983 os.path.dirname(pycompat.fsencode(os.__file__)))
983
984
984 security = set(sslutil.supportedprotocols)
985 security = set(sslutil.supportedprotocols)
985 if sslutil.hassni:
986 if sslutil.hassni:
986 security.add('sni')
987 security.add('sni')
987
988
988 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
989 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
989 fm.formatlist(sorted(security), name='protocol',
990 fm.formatlist(sorted(security), name='protocol',
990 fmt='%s', sep=','))
991 fmt='%s', sep=','))
991
992
992 # These are warnings, not errors. So don't increment problem count. This
993 # These are warnings, not errors. So don't increment problem count. This
993 # may change in the future.
994 # may change in the future.
994 if 'tls1.2' not in security:
995 if 'tls1.2' not in security:
995 fm.plain(_(' TLS 1.2 not supported by Python install; '
996 fm.plain(_(' TLS 1.2 not supported by Python install; '
996 'network connections lack modern security\n'))
997 'network connections lack modern security\n'))
997 if 'sni' not in security:
998 if 'sni' not in security:
998 fm.plain(_(' SNI not supported by Python install; may have '
999 fm.plain(_(' SNI not supported by Python install; may have '
999 'connectivity issues with some servers\n'))
1000 'connectivity issues with some servers\n'))
1000
1001
1001 # TODO print CA cert info
1002 # TODO print CA cert info
1002
1003
1003 # hg version
1004 # hg version
1004 hgver = util.version()
1005 hgver = util.version()
1005 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1006 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1006 hgver.split('+')[0])
1007 hgver.split('+')[0])
1007 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1008 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1008 '+'.join(hgver.split('+')[1:]))
1009 '+'.join(hgver.split('+')[1:]))
1009
1010
1010 # compiled modules
1011 # compiled modules
1011 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1012 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1012 policy.policy)
1013 policy.policy)
1013 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1014 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1014 os.path.dirname(pycompat.fsencode(__file__)))
1015 os.path.dirname(pycompat.fsencode(__file__)))
1015
1016
1016 if policy.policy in ('c', 'allow'):
1017 if policy.policy in ('c', 'allow'):
1017 err = None
1018 err = None
1018 try:
1019 try:
1019 from .cext import (
1020 from .cext import (
1020 base85,
1021 base85,
1021 bdiff,
1022 bdiff,
1022 mpatch,
1023 mpatch,
1023 osutil,
1024 osutil,
1024 )
1025 )
1025 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1026 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1026 except Exception as inst:
1027 except Exception as inst:
1027 err = inst
1028 err = inst
1028 problems += 1
1029 problems += 1
1029 fm.condwrite(err, 'extensionserror', " %s\n", err)
1030 fm.condwrite(err, 'extensionserror', " %s\n", err)
1030
1031
1031 compengines = util.compengines._engines.values()
1032 compengines = util.compengines._engines.values()
1032 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1033 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1033 fm.formatlist(sorted(e.name() for e in compengines),
1034 fm.formatlist(sorted(e.name() for e in compengines),
1034 name='compengine', fmt='%s', sep=', '))
1035 name='compengine', fmt='%s', sep=', '))
1035 fm.write('compenginesavail', _('checking available compression engines '
1036 fm.write('compenginesavail', _('checking available compression engines '
1036 '(%s)\n'),
1037 '(%s)\n'),
1037 fm.formatlist(sorted(e.name() for e in compengines
1038 fm.formatlist(sorted(e.name() for e in compengines
1038 if e.available()),
1039 if e.available()),
1039 name='compengine', fmt='%s', sep=', '))
1040 name='compengine', fmt='%s', sep=', '))
1040 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1041 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1041 fm.write('compenginesserver', _('checking available compression engines '
1042 fm.write('compenginesserver', _('checking available compression engines '
1042 'for wire protocol (%s)\n'),
1043 'for wire protocol (%s)\n'),
1043 fm.formatlist([e.name() for e in wirecompengines
1044 fm.formatlist([e.name() for e in wirecompengines
1044 if e.wireprotosupport()],
1045 if e.wireprotosupport()],
1045 name='compengine', fmt='%s', sep=', '))
1046 name='compengine', fmt='%s', sep=', '))
1046
1047
1047 # templates
1048 # templates
1048 p = templater.templatepaths()
1049 p = templater.templatepaths()
1049 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1050 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1050 fm.condwrite(not p, '', _(" no template directories found\n"))
1051 fm.condwrite(not p, '', _(" no template directories found\n"))
1051 if p:
1052 if p:
1052 m = templater.templatepath("map-cmdline.default")
1053 m = templater.templatepath("map-cmdline.default")
1053 if m:
1054 if m:
1054 # template found, check if it is working
1055 # template found, check if it is working
1055 err = None
1056 err = None
1056 try:
1057 try:
1057 templater.templater.frommapfile(m)
1058 templater.templater.frommapfile(m)
1058 except Exception as inst:
1059 except Exception as inst:
1059 err = inst
1060 err = inst
1060 p = None
1061 p = None
1061 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1062 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1062 else:
1063 else:
1063 p = None
1064 p = None
1064 fm.condwrite(p, 'defaulttemplate',
1065 fm.condwrite(p, 'defaulttemplate',
1065 _("checking default template (%s)\n"), m)
1066 _("checking default template (%s)\n"), m)
1066 fm.condwrite(not m, 'defaulttemplatenotfound',
1067 fm.condwrite(not m, 'defaulttemplatenotfound',
1067 _(" template '%s' not found\n"), "default")
1068 _(" template '%s' not found\n"), "default")
1068 if not p:
1069 if not p:
1069 problems += 1
1070 problems += 1
1070 fm.condwrite(not p, '',
1071 fm.condwrite(not p, '',
1071 _(" (templates seem to have been installed incorrectly)\n"))
1072 _(" (templates seem to have been installed incorrectly)\n"))
1072
1073
1073 # editor
1074 # editor
1074 editor = ui.geteditor()
1075 editor = ui.geteditor()
1075 editor = util.expandpath(editor)
1076 editor = util.expandpath(editor)
1076 fm.write('editor', _("checking commit editor... (%s)\n"), editor)
1077 fm.write('editor', _("checking commit editor... (%s)\n"), editor)
1077 cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
1078 cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
1078 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1079 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1079 _(" No commit editor set and can't find %s in PATH\n"
1080 _(" No commit editor set and can't find %s in PATH\n"
1080 " (specify a commit editor in your configuration"
1081 " (specify a commit editor in your configuration"
1081 " file)\n"), not cmdpath and editor == 'vi' and editor)
1082 " file)\n"), not cmdpath and editor == 'vi' and editor)
1082 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1083 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1083 _(" Can't find editor '%s' in PATH\n"
1084 _(" Can't find editor '%s' in PATH\n"
1084 " (specify a commit editor in your configuration"
1085 " (specify a commit editor in your configuration"
1085 " file)\n"), not cmdpath and editor)
1086 " file)\n"), not cmdpath and editor)
1086 if not cmdpath and editor != 'vi':
1087 if not cmdpath and editor != 'vi':
1087 problems += 1
1088 problems += 1
1088
1089
1089 # check username
1090 # check username
1090 username = None
1091 username = None
1091 err = None
1092 err = None
1092 try:
1093 try:
1093 username = ui.username()
1094 username = ui.username()
1094 except error.Abort as e:
1095 except error.Abort as e:
1095 err = e
1096 err = e
1096 problems += 1
1097 problems += 1
1097
1098
1098 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1099 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1099 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1100 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1100 " (specify a username in your configuration file)\n"), err)
1101 " (specify a username in your configuration file)\n"), err)
1101
1102
1102 fm.condwrite(not problems, '',
1103 fm.condwrite(not problems, '',
1103 _("no problems detected\n"))
1104 _("no problems detected\n"))
1104 if not problems:
1105 if not problems:
1105 fm.data(problems=problems)
1106 fm.data(problems=problems)
1106 fm.condwrite(problems, 'problems',
1107 fm.condwrite(problems, 'problems',
1107 _("%d problems detected,"
1108 _("%d problems detected,"
1108 " please check your install!\n"), problems)
1109 " please check your install!\n"), problems)
1109 fm.end()
1110 fm.end()
1110
1111
1111 return problems
1112 return problems
1112
1113
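# Illustrative usage of debuginstall (a sketch added for clarity; -T json comes
# from the shared formatter options and is assumed to be available here):
#
#   $ hg debuginstall                # human-readable checklist; exit code is the problem count
#   $ hg debuginstall -T json        # same checks with machine-readable output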
1113 @command('debugknown', [], _('REPO ID...'), norepo=True)
1114 @command('debugknown', [], _('REPO ID...'), norepo=True)
1114 def debugknown(ui, repopath, *ids, **opts):
1115 def debugknown(ui, repopath, *ids, **opts):
1115 """test whether node ids are known to a repo
1116 """test whether node ids are known to a repo
1116
1117
1117 Every ID must be a full-length hex node id string. Returns a list of 0s
1118 Every ID must be a full-length hex node id string. Returns a list of 0s
1118 and 1s indicating unknown/known.
1119 and 1s indicating unknown/known.
1119 """
1120 """
1120 repo = hg.peer(ui, opts, repopath)
1121 repo = hg.peer(ui, opts, repopath)
1121 if not repo.capable('known'):
1122 if not repo.capable('known'):
1122 raise error.Abort("known() not supported by target repository")
1123 raise error.Abort("known() not supported by target repository")
1123 flags = repo.known([bin(s) for s in ids])
1124 flags = repo.known([bin(s) for s in ids])
1124 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1125 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1125
1126
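# Illustrative usage of debugknown (a sketch added for clarity; the URL is
# hypothetical and the node ids must be full 40-character hex strings):
#
#   $ hg debugknown https://example.com/repo <node1> <node2>
#   10
#
# A "1" means the corresponding node is known to the remote, "0" means unknown.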
1126 @command('debuglabelcomplete', [], _('LABEL...'))
1127 @command('debuglabelcomplete', [], _('LABEL...'))
1127 def debuglabelcomplete(ui, repo, *args):
1128 def debuglabelcomplete(ui, repo, *args):
1128 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1129 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1129 debugnamecomplete(ui, repo, *args)
1130 debugnamecomplete(ui, repo, *args)
1130
1131
1131 @command('debuglocks',
1132 @command('debuglocks',
1132 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1133 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1133 ('W', 'force-wlock', None,
1134 ('W', 'force-wlock', None,
1134 _('free the working state lock (DANGEROUS)'))],
1135 _('free the working state lock (DANGEROUS)'))],
1135 _('[OPTION]...'))
1136 _('[OPTION]...'))
1136 def debuglocks(ui, repo, **opts):
1137 def debuglocks(ui, repo, **opts):
1137 """show or modify state of locks
1138 """show or modify state of locks
1138
1139
1139 By default, this command will show which locks are held. This
1140 By default, this command will show which locks are held. This
1140 includes the user and process holding the lock, the amount of time
1141 includes the user and process holding the lock, the amount of time
1141 the lock has been held, and the machine name where the process is
1142 the lock has been held, and the machine name where the process is
1142 running if it's not local.
1143 running if it's not local.
1143
1144
1144 Locks protect the integrity of Mercurial's data, so they should be
1145 Locks protect the integrity of Mercurial's data, so they should be
1145 treated with care. System crashes or other interruptions may cause
1146 treated with care. System crashes or other interruptions may cause
1146 locks to not be properly released, though Mercurial will usually
1147 locks to not be properly released, though Mercurial will usually
1147 detect and remove such stale locks automatically.
1148 detect and remove such stale locks automatically.
1148
1149
1149 However, detecting stale locks may not always be possible (for
1150 However, detecting stale locks may not always be possible (for
1150 instance, on a shared filesystem). Removing locks may also be
1151 instance, on a shared filesystem). Removing locks may also be
1151 blocked by filesystem permissions.
1152 blocked by filesystem permissions.
1152
1153
1153 Returns 0 if no locks are held.
1154 Returns 0 if no locks are held.
1154
1155
1155 """
1156 """
1156
1157
1157 if opts.get('force_lock'):
1158 if opts.get('force_lock'):
1158 repo.svfs.unlink('lock')
1159 repo.svfs.unlink('lock')
1159 if opts.get('force_wlock'):
1160 if opts.get('force_wlock'):
1160 repo.vfs.unlink('wlock')
1161 repo.vfs.unlink('wlock')
1161 if opts.get('force_lock') or opts.get('force_wlock'):
1162 if opts.get('force_lock') or opts.get('force_wlock'):
1162 return 0
1163 return 0
1163
1164
1164 now = time.time()
1165 now = time.time()
1165 held = 0
1166 held = 0
1166
1167
1167 def report(vfs, name, method):
1168 def report(vfs, name, method):
1168 # this causes stale locks to get reaped for more accurate reporting
1169 # this causes stale locks to get reaped for more accurate reporting
1169 try:
1170 try:
1170 l = method(False)
1171 l = method(False)
1171 except error.LockHeld:
1172 except error.LockHeld:
1172 l = None
1173 l = None
1173
1174
1174 if l:
1175 if l:
1175 l.release()
1176 l.release()
1176 else:
1177 else:
1177 try:
1178 try:
1178 stat = vfs.lstat(name)
1179 stat = vfs.lstat(name)
1179 age = now - stat.st_mtime
1180 age = now - stat.st_mtime
1180 user = util.username(stat.st_uid)
1181 user = util.username(stat.st_uid)
1181 locker = vfs.readlock(name)
1182 locker = vfs.readlock(name)
1182 if ":" in locker:
1183 if ":" in locker:
1183 host, pid = locker.split(':')
1184 host, pid = locker.split(':')
1184 if host == socket.gethostname():
1185 if host == socket.gethostname():
1185 locker = 'user %s, process %s' % (user, pid)
1186 locker = 'user %s, process %s' % (user, pid)
1186 else:
1187 else:
1187 locker = 'user %s, process %s, host %s' \
1188 locker = 'user %s, process %s, host %s' \
1188 % (user, pid, host)
1189 % (user, pid, host)
1189 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1190 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1190 return 1
1191 return 1
1191 except OSError as e:
1192 except OSError as e:
1192 if e.errno != errno.ENOENT:
1193 if e.errno != errno.ENOENT:
1193 raise
1194 raise
1194
1195
1195 ui.write(("%-6s free\n") % (name + ":"))
1196 ui.write(("%-6s free\n") % (name + ":"))
1196 return 0
1197 return 0
1197
1198
1198 held += report(repo.svfs, "lock", repo.lock)
1199 held += report(repo.svfs, "lock", repo.lock)
1199 held += report(repo.vfs, "wlock", repo.wlock)
1200 held += report(repo.vfs, "wlock", repo.wlock)
1200
1201
1201 return held
1202 return held
1202
1203
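# Illustrative usage of debuglocks (a sketch added for clarity; the flags come
# from the option table above):
#
#   $ hg debuglocks        # report lock/wlock holders; exit code 0 if none are held
#   $ hg debuglocks -L     # forcibly remove the store lock (dangerous)
#   $ hg debuglocks -W     # forcibly remove the working directory lock (dangerous)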
1203 @command('debugmergestate', [], '')
1204 @command('debugmergestate', [], '')
1204 def debugmergestate(ui, repo, *args):
1205 def debugmergestate(ui, repo, *args):
1205 """print merge state
1206 """print merge state
1206
1207
1207 Use --verbose to print out information about whether v1 or v2 merge state
1208 Use --verbose to print out information about whether v1 or v2 merge state
1208 was chosen."""
1209 was chosen."""
1209 def _hashornull(h):
1210 def _hashornull(h):
1210 if h == nullhex:
1211 if h == nullhex:
1211 return 'null'
1212 return 'null'
1212 else:
1213 else:
1213 return h
1214 return h
1214
1215
1215 def printrecords(version):
1216 def printrecords(version):
1216 ui.write(('* version %s records\n') % version)
1217 ui.write(('* version %s records\n') % version)
1217 if version == 1:
1218 if version == 1:
1218 records = v1records
1219 records = v1records
1219 else:
1220 else:
1220 records = v2records
1221 records = v2records
1221
1222
1222 for rtype, record in records:
1223 for rtype, record in records:
1223 # pretty print some record types
1224 # pretty print some record types
1224 if rtype == 'L':
1225 if rtype == 'L':
1225 ui.write(('local: %s\n') % record)
1226 ui.write(('local: %s\n') % record)
1226 elif rtype == 'O':
1227 elif rtype == 'O':
1227 ui.write(('other: %s\n') % record)
1228 ui.write(('other: %s\n') % record)
1228 elif rtype == 'm':
1229 elif rtype == 'm':
1229 driver, mdstate = record.split('\0', 1)
1230 driver, mdstate = record.split('\0', 1)
1230 ui.write(('merge driver: %s (state "%s")\n')
1231 ui.write(('merge driver: %s (state "%s")\n')
1231 % (driver, mdstate))
1232 % (driver, mdstate))
1232 elif rtype in 'FDC':
1233 elif rtype in 'FDC':
1233 r = record.split('\0')
1234 r = record.split('\0')
1234 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1235 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1235 if version == 1:
1236 if version == 1:
1236 onode = 'not stored in v1 format'
1237 onode = 'not stored in v1 format'
1237 flags = r[7]
1238 flags = r[7]
1238 else:
1239 else:
1239 onode, flags = r[7:9]
1240 onode, flags = r[7:9]
1240 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1241 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1241 % (f, rtype, state, _hashornull(hash)))
1242 % (f, rtype, state, _hashornull(hash)))
1242 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1243 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1243 ui.write((' ancestor path: %s (node %s)\n')
1244 ui.write((' ancestor path: %s (node %s)\n')
1244 % (afile, _hashornull(anode)))
1245 % (afile, _hashornull(anode)))
1245 ui.write((' other path: %s (node %s)\n')
1246 ui.write((' other path: %s (node %s)\n')
1246 % (ofile, _hashornull(onode)))
1247 % (ofile, _hashornull(onode)))
1247 elif rtype == 'f':
1248 elif rtype == 'f':
1248 filename, rawextras = record.split('\0', 1)
1249 filename, rawextras = record.split('\0', 1)
1249 extras = rawextras.split('\0')
1250 extras = rawextras.split('\0')
1250 i = 0
1251 i = 0
1251 extrastrings = []
1252 extrastrings = []
1252 while i < len(extras):
1253 while i < len(extras):
1253 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1254 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1254 i += 2
1255 i += 2
1255
1256
1256 ui.write(('file extras: %s (%s)\n')
1257 ui.write(('file extras: %s (%s)\n')
1257 % (filename, ', '.join(extrastrings)))
1258 % (filename, ', '.join(extrastrings)))
1258 elif rtype == 'l':
1259 elif rtype == 'l':
1259 labels = record.split('\0', 2)
1260 labels = record.split('\0', 2)
1260 labels = [l for l in labels if len(l) > 0]
1261 labels = [l for l in labels if len(l) > 0]
1261 ui.write(('labels:\n'))
1262 ui.write(('labels:\n'))
1262 ui.write((' local: %s\n' % labels[0]))
1263 ui.write((' local: %s\n' % labels[0]))
1263 ui.write((' other: %s\n' % labels[1]))
1264 ui.write((' other: %s\n' % labels[1]))
1264 if len(labels) > 2:
1265 if len(labels) > 2:
1265 ui.write((' base: %s\n' % labels[2]))
1266 ui.write((' base: %s\n' % labels[2]))
1266 else:
1267 else:
1267 ui.write(('unrecognized entry: %s\t%s\n')
1268 ui.write(('unrecognized entry: %s\t%s\n')
1268 % (rtype, record.replace('\0', '\t')))
1269 % (rtype, record.replace('\0', '\t')))
1269
1270
1270 # Avoid mergestate.read() since it may raise an exception for unsupported
1271 # Avoid mergestate.read() since it may raise an exception for unsupported
1271 # merge state records. We shouldn't be doing this, but this is OK since this
1272 # merge state records. We shouldn't be doing this, but this is OK since this
1272 # command is pretty low-level.
1273 # command is pretty low-level.
1273 ms = mergemod.mergestate(repo)
1274 ms = mergemod.mergestate(repo)
1274
1275
1275 # sort so that reasonable information is on top
1276 # sort so that reasonable information is on top
1276 v1records = ms._readrecordsv1()
1277 v1records = ms._readrecordsv1()
1277 v2records = ms._readrecordsv2()
1278 v2records = ms._readrecordsv2()
1278 order = 'LOml'
1279 order = 'LOml'
1279 def key(r):
1280 def key(r):
1280 idx = order.find(r[0])
1281 idx = order.find(r[0])
1281 if idx == -1:
1282 if idx == -1:
1282 return (1, r[1])
1283 return (1, r[1])
1283 else:
1284 else:
1284 return (0, idx)
1285 return (0, idx)
1285 v1records.sort(key=key)
1286 v1records.sort(key=key)
1286 v2records.sort(key=key)
1287 v2records.sort(key=key)
1287
1288
1288 if not v1records and not v2records:
1289 if not v1records and not v2records:
1289 ui.write(('no merge state found\n'))
1290 ui.write(('no merge state found\n'))
1290 elif not v2records:
1291 elif not v2records:
1291 ui.note(('no version 2 merge state\n'))
1292 ui.note(('no version 2 merge state\n'))
1292 printrecords(1)
1293 printrecords(1)
1293 elif ms._v1v2match(v1records, v2records):
1294 elif ms._v1v2match(v1records, v2records):
1294 ui.note(('v1 and v2 states match: using v2\n'))
1295 ui.note(('v1 and v2 states match: using v2\n'))
1295 printrecords(2)
1296 printrecords(2)
1296 else:
1297 else:
1297 ui.note(('v1 and v2 states mismatch: using v1\n'))
1298 ui.note(('v1 and v2 states mismatch: using v1\n'))
1298 printrecords(1)
1299 printrecords(1)
1299 if ui.verbose:
1300 if ui.verbose:
1300 printrecords(2)
1301 printrecords(2)
1301
1302
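# Note on the record ordering used in debugmergestate above (an illustrative
# sketch, not from the original source): records whose type appears in 'LOml'
# sort first, in that order; any other record sorts after them by its payload.
# For example, with the local key() defined above:
#
#   sorted([('x', 'b'), ('m', 'drv'), ('L', 'p1')], key=key)
#   # -> [('L', 'p1'), ('m', 'drv'), ('x', 'b')]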
1302 @command('debugnamecomplete', [], _('NAME...'))
1303 @command('debugnamecomplete', [], _('NAME...'))
1303 def debugnamecomplete(ui, repo, *args):
1304 def debugnamecomplete(ui, repo, *args):
1304 '''complete "names" - tags, open branch names, bookmark names'''
1305 '''complete "names" - tags, open branch names, bookmark names'''
1305
1306
1306 names = set()
1307 names = set()
1307 # since we previously only listed open branches, we will handle that
1308 # since we previously only listed open branches, we will handle that
1308 # specially (after this for loop)
1309 # specially (after this for loop)
1309 for name, ns in repo.names.iteritems():
1310 for name, ns in repo.names.iteritems():
1310 if name != 'branches':
1311 if name != 'branches':
1311 names.update(ns.listnames(repo))
1312 names.update(ns.listnames(repo))
1312 names.update(tag for (tag, heads, tip, closed)
1313 names.update(tag for (tag, heads, tip, closed)
1313 in repo.branchmap().iterbranches() if not closed)
1314 in repo.branchmap().iterbranches() if not closed)
1314 completions = set()
1315 completions = set()
1315 if not args:
1316 if not args:
1316 args = ['']
1317 args = ['']
1317 for a in args:
1318 for a in args:
1318 completions.update(n for n in names if n.startswith(a))
1319 completions.update(n for n in names if n.startswith(a))
1319 ui.write('\n'.join(sorted(completions)))
1320 ui.write('\n'.join(sorted(completions)))
1320 ui.write('\n')
1321 ui.write('\n')
1321
1322
1322 @command('debugobsolete',
1323 @command('debugobsolete',
1323 [('', 'flags', 0, _('markers flag')),
1324 [('', 'flags', 0, _('markers flag')),
1324 ('', 'record-parents', False,
1325 ('', 'record-parents', False,
1325 _('record parent information for the precursor')),
1326 _('record parent information for the precursor')),
1326 ('r', 'rev', [], _('display markers relevant to REV')),
1327 ('r', 'rev', [], _('display markers relevant to REV')),
1327 ('', 'exclusive', False, _('restrict display to markers only '
1328 ('', 'exclusive', False, _('restrict display to markers only '
1328 'relevant to REV')),
1329 'relevant to REV')),
1329 ('', 'index', False, _('display index of the marker')),
1330 ('', 'index', False, _('display index of the marker')),
1330 ('', 'delete', [], _('delete markers specified by indices')),
1331 ('', 'delete', [], _('delete markers specified by indices')),
1331 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1332 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1332 _('[OBSOLETED [REPLACEMENT ...]]'))
1333 _('[OBSOLETED [REPLACEMENT ...]]'))
1333 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1334 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1334 """create arbitrary obsolete marker
1335 """create arbitrary obsolete marker
1335
1336
1336 With no arguments, displays the list of obsolescence markers."""
1337 With no arguments, displays the list of obsolescence markers."""
1337
1338
1338 def parsenodeid(s):
1339 def parsenodeid(s):
1339 try:
1340 try:
1340 # We do not use revsingle/revrange functions here to accept
1341 # We do not use revsingle/revrange functions here to accept
1341 # arbitrary node identifiers, possibly not present in the
1342 # arbitrary node identifiers, possibly not present in the
1342 # local repository.
1343 # local repository.
1343 n = bin(s)
1344 n = bin(s)
1344 if len(n) != len(nullid):
1345 if len(n) != len(nullid):
1345 raise TypeError()
1346 raise TypeError()
1346 return n
1347 return n
1347 except TypeError:
1348 except TypeError:
1348 raise error.Abort('changeset references must be full hexadecimal '
1349 raise error.Abort('changeset references must be full hexadecimal '
1349 'node identifiers')
1350 'node identifiers')
1350
1351
1351 if opts.get('delete'):
1352 if opts.get('delete'):
1352 indices = []
1353 indices = []
1353 for v in opts.get('delete'):
1354 for v in opts.get('delete'):
1354 try:
1355 try:
1355 indices.append(int(v))
1356 indices.append(int(v))
1356 except ValueError:
1357 except ValueError:
1357 raise error.Abort(_('invalid index value: %r') % v,
1358 raise error.Abort(_('invalid index value: %r') % v,
1358 hint=_('use integers for indices'))
1359 hint=_('use integers for indices'))
1359
1360
1360 if repo.currenttransaction():
1361 if repo.currenttransaction():
1361 raise error.Abort(_('cannot delete obsmarkers in the middle '
1362 raise error.Abort(_('cannot delete obsmarkers in the middle '
1362 'of a transaction.'))
1363 'of a transaction.'))
1363
1364
1364 with repo.lock():
1365 with repo.lock():
1365 n = repair.deleteobsmarkers(repo.obsstore, indices)
1366 n = repair.deleteobsmarkers(repo.obsstore, indices)
1366 ui.write(_('deleted %i obsolescence markers\n') % n)
1367 ui.write(_('deleted %i obsolescence markers\n') % n)
1367
1368
1368 return
1369 return
1369
1370
1370 if precursor is not None:
1371 if precursor is not None:
1371 if opts['rev']:
1372 if opts['rev']:
1372 raise error.Abort('cannot select revision when creating marker')
1373 raise error.Abort('cannot select revision when creating marker')
1373 metadata = {}
1374 metadata = {}
1374 metadata['user'] = opts['user'] or ui.username()
1375 metadata['user'] = opts['user'] or ui.username()
1375 succs = tuple(parsenodeid(succ) for succ in successors)
1376 succs = tuple(parsenodeid(succ) for succ in successors)
1376 l = repo.lock()
1377 l = repo.lock()
1377 try:
1378 try:
1378 tr = repo.transaction('debugobsolete')
1379 tr = repo.transaction('debugobsolete')
1379 try:
1380 try:
1380 date = opts.get('date')
1381 date = opts.get('date')
1381 if date:
1382 if date:
1382 date = util.parsedate(date)
1383 date = util.parsedate(date)
1383 else:
1384 else:
1384 date = None
1385 date = None
1385 prec = parsenodeid(precursor)
1386 prec = parsenodeid(precursor)
1386 parents = None
1387 parents = None
1387 if opts['record_parents']:
1388 if opts['record_parents']:
1388 if prec not in repo.unfiltered():
1389 if prec not in repo.unfiltered():
1389 raise error.Abort('cannot use --record-parents on '
1390 raise error.Abort('cannot use --record-parents on '
1390 'unknown changesets')
1391 'unknown changesets')
1391 parents = repo.unfiltered()[prec].parents()
1392 parents = repo.unfiltered()[prec].parents()
1392 parents = tuple(p.node() for p in parents)
1393 parents = tuple(p.node() for p in parents)
1393 repo.obsstore.create(tr, prec, succs, opts['flags'],
1394 repo.obsstore.create(tr, prec, succs, opts['flags'],
1394 parents=parents, date=date,
1395 parents=parents, date=date,
1395 metadata=metadata, ui=ui)
1396 metadata=metadata, ui=ui)
1396 tr.close()
1397 tr.close()
1397 except ValueError as exc:
1398 except ValueError as exc:
1398 raise error.Abort(_('bad obsmarker input: %s') % exc)
1399 raise error.Abort(_('bad obsmarker input: %s') % exc)
1399 finally:
1400 finally:
1400 tr.release()
1401 tr.release()
1401 finally:
1402 finally:
1402 l.release()
1403 l.release()
1403 else:
1404 else:
1404 if opts['rev']:
1405 if opts['rev']:
1405 revs = scmutil.revrange(repo, opts['rev'])
1406 revs = scmutil.revrange(repo, opts['rev'])
1406 nodes = [repo[r].node() for r in revs]
1407 nodes = [repo[r].node() for r in revs]
1407 markers = list(obsolete.getmarkers(repo, nodes=nodes,
1408 markers = list(obsolete.getmarkers(repo, nodes=nodes,
1408 exclusive=opts['exclusive']))
1409 exclusive=opts['exclusive']))
1409 markers.sort(key=lambda x: x._data)
1410 markers.sort(key=lambda x: x._data)
1410 else:
1411 else:
1411 markers = obsolete.getmarkers(repo)
1412 markers = obsolete.getmarkers(repo)
1412
1413
1413 markerstoiter = markers
1414 markerstoiter = markers
1414 isrelevant = lambda m: True
1415 isrelevant = lambda m: True
1415 if opts.get('rev') and opts.get('index'):
1416 if opts.get('rev') and opts.get('index'):
1416 markerstoiter = obsolete.getmarkers(repo)
1417 markerstoiter = obsolete.getmarkers(repo)
1417 markerset = set(markers)
1418 markerset = set(markers)
1418 isrelevant = lambda m: m in markerset
1419 isrelevant = lambda m: m in markerset
1419
1420
1420 fm = ui.formatter('debugobsolete', opts)
1421 fm = ui.formatter('debugobsolete', opts)
1421 for i, m in enumerate(markerstoiter):
1422 for i, m in enumerate(markerstoiter):
1422 if not isrelevant(m):
1423 if not isrelevant(m):
1423 # a marker can be irrelevant when we're iterating over a set
1424 # a marker can be irrelevant when we're iterating over a set
1424 # of markers (markerstoiter) which is bigger than the set
1425 # of markers (markerstoiter) which is bigger than the set
1425 # of markers we want to display (markers).
1426 # of markers we want to display (markers).
1426 # This can happen if both --index and --rev options are
1427 # This can happen if both --index and --rev options are
1427 # provided and thus we need to iterate over all of the markers
1428 # provided and thus we need to iterate over all of the markers
1428 # to get the correct indices, but only display the ones that
1429 # to get the correct indices, but only display the ones that
1429 # are relevant to the --rev value.
1430 # are relevant to the --rev value.
1430 continue
1431 continue
1431 fm.startitem()
1432 fm.startitem()
1432 ind = i if opts.get('index') else None
1433 ind = i if opts.get('index') else None
1433 cmdutil.showmarker(fm, m, index=ind)
1434 cmdutil.showmarker(fm, m, index=ind)
1434 fm.end()
1435 fm.end()
1435
1436
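# Illustrative usage of debugobsolete (a sketch added for clarity; the node ids
# are placeholders and must be full hexadecimal identifiers):
#
#   $ hg debugobsolete                        # list all markers
#   $ hg debugobsolete --index                # list markers with their indices
#   $ hg debugobsolete <old-node> <new-node>  # record that <old-node> was rewritten as <new-node>
#   $ hg debugobsolete --delete 0             # delete the marker at index 0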
1436 @command('debugpathcomplete',
1437 @command('debugpathcomplete',
1437 [('f', 'full', None, _('complete an entire path')),
1438 [('f', 'full', None, _('complete an entire path')),
1438 ('n', 'normal', None, _('show only normal files')),
1439 ('n', 'normal', None, _('show only normal files')),
1439 ('a', 'added', None, _('show only added files')),
1440 ('a', 'added', None, _('show only added files')),
1440 ('r', 'removed', None, _('show only removed files'))],
1441 ('r', 'removed', None, _('show only removed files'))],
1441 _('FILESPEC...'))
1442 _('FILESPEC...'))
1442 def debugpathcomplete(ui, repo, *specs, **opts):
1443 def debugpathcomplete(ui, repo, *specs, **opts):
1443 '''complete part or all of a tracked path
1444 '''complete part or all of a tracked path
1444
1445
1445 This command supports shells that offer path name completion. It
1446 This command supports shells that offer path name completion. It
1446 currently completes only files already known to the dirstate.
1447 currently completes only files already known to the dirstate.
1447
1448
1448 Completion extends only to the next path segment unless
1449 Completion extends only to the next path segment unless
1449 --full is specified, in which case entire paths are used.'''
1450 --full is specified, in which case entire paths are used.'''
1450
1451
1451 def complete(path, acceptable):
1452 def complete(path, acceptable):
1452 dirstate = repo.dirstate
1453 dirstate = repo.dirstate
1453 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1454 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1454 rootdir = repo.root + pycompat.ossep
1455 rootdir = repo.root + pycompat.ossep
1455 if spec != repo.root and not spec.startswith(rootdir):
1456 if spec != repo.root and not spec.startswith(rootdir):
1456 return [], []
1457 return [], []
1457 if os.path.isdir(spec):
1458 if os.path.isdir(spec):
1458 spec += '/'
1459 spec += '/'
1459 spec = spec[len(rootdir):]
1460 spec = spec[len(rootdir):]
1460 fixpaths = pycompat.ossep != '/'
1461 fixpaths = pycompat.ossep != '/'
1461 if fixpaths:
1462 if fixpaths:
1462 spec = spec.replace(pycompat.ossep, '/')
1463 spec = spec.replace(pycompat.ossep, '/')
1463 speclen = len(spec)
1464 speclen = len(spec)
1464 fullpaths = opts['full']
1465 fullpaths = opts['full']
1465 files, dirs = set(), set()
1466 files, dirs = set(), set()
1466 adddir, addfile = dirs.add, files.add
1467 adddir, addfile = dirs.add, files.add
1467 for f, st in dirstate.iteritems():
1468 for f, st in dirstate.iteritems():
1468 if f.startswith(spec) and st[0] in acceptable:
1469 if f.startswith(spec) and st[0] in acceptable:
1469 if fixpaths:
1470 if fixpaths:
1470 f = f.replace('/', pycompat.ossep)
1471 f = f.replace('/', pycompat.ossep)
1471 if fullpaths:
1472 if fullpaths:
1472 addfile(f)
1473 addfile(f)
1473 continue
1474 continue
1474 s = f.find(pycompat.ossep, speclen)
1475 s = f.find(pycompat.ossep, speclen)
1475 if s >= 0:
1476 if s >= 0:
1476 adddir(f[:s])
1477 adddir(f[:s])
1477 else:
1478 else:
1478 addfile(f)
1479 addfile(f)
1479 return files, dirs
1480 return files, dirs
1480
1481
1481 acceptable = ''
1482 acceptable = ''
1482 if opts['normal']:
1483 if opts['normal']:
1483 acceptable += 'nm'
1484 acceptable += 'nm'
1484 if opts['added']:
1485 if opts['added']:
1485 acceptable += 'a'
1486 acceptable += 'a'
1486 if opts['removed']:
1487 if opts['removed']:
1487 acceptable += 'r'
1488 acceptable += 'r'
1488 cwd = repo.getcwd()
1489 cwd = repo.getcwd()
1489 if not specs:
1490 if not specs:
1490 specs = ['.']
1491 specs = ['.']
1491
1492
1492 files, dirs = set(), set()
1493 files, dirs = set(), set()
1493 for spec in specs:
1494 for spec in specs:
1494 f, d = complete(spec, acceptable or 'nmar')
1495 f, d = complete(spec, acceptable or 'nmar')
1495 files.update(f)
1496 files.update(f)
1496 dirs.update(d)
1497 dirs.update(d)
1497 files.update(dirs)
1498 files.update(dirs)
1498 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1499 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1499 ui.write('\n')
1500 ui.write('\n')
1500
1501
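# Illustrative usage of debugpathcomplete (a sketch added for clarity; the path
# is hypothetical, the flags come from the option table above):
#
#   $ hg debugpathcomplete src/       # complete one path segment under src/
#   $ hg debugpathcomplete -f src/    # complete entire tracked paths under src/
#   $ hg debugpathcomplete -a         # restrict completion to added files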
1501 @command('debugpickmergetool',
1502 @command('debugpickmergetool',
1502 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1503 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1503 ('', 'changedelete', None, _('emulate merging change and delete')),
1504 ('', 'changedelete', None, _('emulate merging change and delete')),
1504 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1505 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1505 _('[PATTERN]...'),
1506 _('[PATTERN]...'),
1506 inferrepo=True)
1507 inferrepo=True)
1507 def debugpickmergetool(ui, repo, *pats, **opts):
1508 def debugpickmergetool(ui, repo, *pats, **opts):
1508 """examine which merge tool is chosen for specified file
1509 """examine which merge tool is chosen for specified file
1509
1510
1510 As described in :hg:`help merge-tools`, Mercurial examines
1511 As described in :hg:`help merge-tools`, Mercurial examines
1511 configurations below in this order to decide which merge tool is
1512 configurations below in this order to decide which merge tool is
1512 chosen for specified file.
1513 chosen for specified file.
1513
1514
1514 1. ``--tool`` option
1515 1. ``--tool`` option
1515 2. ``HGMERGE`` environment variable
1516 2. ``HGMERGE`` environment variable
1516 3. configurations in ``merge-patterns`` section
1517 3. configurations in ``merge-patterns`` section
1517 4. configuration of ``ui.merge``
1518 4. configuration of ``ui.merge``
1518 5. configurations in ``merge-tools`` section
1519 5. configurations in ``merge-tools`` section
1519 6. ``hgmerge`` tool (for historical reasons only)
1520 6. ``hgmerge`` tool (for historical reasons only)
1520 7. default tool for fallback (``:merge`` or ``:prompt``)
1521 7. default tool for fallback (``:merge`` or ``:prompt``)
1521
1522
1522 This command writes out the examination result in the style below::
1523 This command writes out the examination result in the style below::
1523
1524
1524 FILE = MERGETOOL
1525 FILE = MERGETOOL
1525
1526
1526 By default, all files known in the first parent context of the
1527 By default, all files known in the first parent context of the
1527 working directory are examined. Use file patterns and/or -I/-X
1528 working directory are examined. Use file patterns and/or -I/-X
1528 options to limit target files. -r/--rev is also useful to examine
1529 options to limit target files. -r/--rev is also useful to examine
1529 files in another context without actually updating to it.
1530 files in another context without actually updating to it.
1530
1531
1531 With --debug, this command also shows warning messages emitted while
1532 With --debug, this command also shows warning messages emitted while
1532 matching against ``merge-patterns`` and similar configuration. It is
1533 matching against ``merge-patterns`` and similar configuration. It is
1533 recommended to use this option with explicit file patterns and/or
1534 recommended to use this option with explicit file patterns and/or
1534 -I/-X options, because it increases the amount of output per file
1535 -I/-X options, because it increases the amount of output per file
1535 according to the configuration in hgrc.
1536 according to the configuration in hgrc.
1536
1537
1537 With -v/--verbose, this command first shows the configurations
1538 With -v/--verbose, this command first shows the configurations
1538 below (only those that are actually set).
1539 below (only those that are actually set).
1539
1540
1540 - ``--tool`` option
1541 - ``--tool`` option
1541 - ``HGMERGE`` environment variable
1542 - ``HGMERGE`` environment variable
1542 - configuration of ``ui.merge``
1543 - configuration of ``ui.merge``
1543
1544
1544 If the merge tool is chosen before matching against
1545 If the merge tool is chosen before matching against
1545 ``merge-patterns``, this command can't show any helpful
1546 ``merge-patterns``, this command can't show any helpful
1546 information, even with --debug. In such a case, the information
1547 information, even with --debug. In such a case, the information
1547 above is useful for knowing why a merge tool was chosen.
1548 above is useful for knowing why a merge tool was chosen.
1548 """
1549 """
1549 overrides = {}
1550 overrides = {}
1550 if opts['tool']:
1551 if opts['tool']:
1551 overrides[('ui', 'forcemerge')] = opts['tool']
1552 overrides[('ui', 'forcemerge')] = opts['tool']
1552 ui.note(('with --tool %r\n') % (opts['tool']))
1553 ui.note(('with --tool %r\n') % (opts['tool']))
1553
1554
1554 with ui.configoverride(overrides, 'debugmergepatterns'):
1555 with ui.configoverride(overrides, 'debugmergepatterns'):
1555 hgmerge = encoding.environ.get("HGMERGE")
1556 hgmerge = encoding.environ.get("HGMERGE")
1556 if hgmerge is not None:
1557 if hgmerge is not None:
1557 ui.note(('with HGMERGE=%r\n') % (hgmerge))
1558 ui.note(('with HGMERGE=%r\n') % (hgmerge))
1558 uimerge = ui.config("ui", "merge")
1559 uimerge = ui.config("ui", "merge")
1559 if uimerge:
1560 if uimerge:
1560 ui.note(('with ui.merge=%r\n') % (uimerge))
1561 ui.note(('with ui.merge=%r\n') % (uimerge))
1561
1562
1562 ctx = scmutil.revsingle(repo, opts.get('rev'))
1563 ctx = scmutil.revsingle(repo, opts.get('rev'))
1563 m = scmutil.match(ctx, pats, opts)
1564 m = scmutil.match(ctx, pats, opts)
1564 changedelete = opts['changedelete']
1565 changedelete = opts['changedelete']
1565 for path in ctx.walk(m):
1566 for path in ctx.walk(m):
1566 fctx = ctx[path]
1567 fctx = ctx[path]
1567 try:
1568 try:
1568 if not ui.debugflag:
1569 if not ui.debugflag:
1569 ui.pushbuffer(error=True)
1570 ui.pushbuffer(error=True)
1570 tool, toolpath = filemerge._picktool(repo, ui, path,
1571 tool, toolpath = filemerge._picktool(repo, ui, path,
1571 fctx.isbinary(),
1572 fctx.isbinary(),
1572 'l' in fctx.flags(),
1573 'l' in fctx.flags(),
1573 changedelete)
1574 changedelete)
1574 finally:
1575 finally:
1575 if not ui.debugflag:
1576 if not ui.debugflag:
1576 ui.popbuffer()
1577 ui.popbuffer()
1577 ui.write(('%s = %s\n') % (path, tool))
1578 ui.write(('%s = %s\n') % (path, tool))
1578
1579
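# Illustrative usage of debugpickmergetool (a sketch added for clarity; the
# pattern and revision are hypothetical, the flags come from the option table):
#
#   $ hg debugpickmergetool                     # report FILE = MERGETOOL for the working parent
#   $ hg debugpickmergetool -r 1.0 'glob:**.c'  # restrict to *.c files as of revision 1.0
#   $ hg debugpickmergetool --tool :merge3      # see the effect of forcing a tool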
1579 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1580 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1580 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1581 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1581 '''access the pushkey key/value protocol
1582 '''access the pushkey key/value protocol
1582
1583
1583 With two args, list the keys in the given namespace.
1584 With two args, list the keys in the given namespace.
1584
1585
1585 With five args, set a key to new if it currently is set to old.
1586 With five args, set a key to new if it currently is set to old.
1586 Reports success or failure.
1587 Reports success or failure.
1587 '''
1588 '''
1588
1589
1589 target = hg.peer(ui, {}, repopath)
1590 target = hg.peer(ui, {}, repopath)
1590 if keyinfo:
1591 if keyinfo:
1591 key, old, new = keyinfo
1592 key, old, new = keyinfo
1592 r = target.pushkey(namespace, key, old, new)
1593 r = target.pushkey(namespace, key, old, new)
1593 ui.status(str(r) + '\n')
1594 ui.status(str(r) + '\n')
1594 return not r
1595 return not r
1595 else:
1596 else:
1596 for k, v in sorted(target.listkeys(namespace).iteritems()):
1597 for k, v in sorted(target.listkeys(namespace).iteritems()):
1597 ui.write("%s\t%s\n" % (util.escapestr(k),
1598 ui.write("%s\t%s\n" % (util.escapestr(k),
1598 util.escapestr(v)))
1599 util.escapestr(v)))
1599
1600
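# Illustrative usage of debugpushkey (a sketch added for clarity; the repository
# URL is hypothetical and 'bookmarks' is one of the standard pushkey namespaces):
#
#   $ hg debugpushkey https://example.com/repo bookmarks         # list bookmark keys
#   $ hg debugpushkey . bookmarks mybook <old-node> <new-node>   # move a bookmark if it still points at <old-node>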
1600 @command('debugpvec', [], _('A B'))
1601 @command('debugpvec', [], _('A B'))
1601 def debugpvec(ui, repo, a, b=None):
1602 def debugpvec(ui, repo, a, b=None):
1602 ca = scmutil.revsingle(repo, a)
1603 ca = scmutil.revsingle(repo, a)
1603 cb = scmutil.revsingle(repo, b)
1604 cb = scmutil.revsingle(repo, b)
1604 pa = pvec.ctxpvec(ca)
1605 pa = pvec.ctxpvec(ca)
1605 pb = pvec.ctxpvec(cb)
1606 pb = pvec.ctxpvec(cb)
1606 if pa == pb:
1607 if pa == pb:
1607 rel = "="
1608 rel = "="
1608 elif pa > pb:
1609 elif pa > pb:
1609 rel = ">"
1610 rel = ">"
1610 elif pa < pb:
1611 elif pa < pb:
1611 rel = "<"
1612 rel = "<"
1612 elif pa | pb:
1613 elif pa | pb:
1613 rel = "|"
1614 rel = "|"
1614 ui.write(_("a: %s\n") % pa)
1615 ui.write(_("a: %s\n") % pa)
1615 ui.write(_("b: %s\n") % pb)
1616 ui.write(_("b: %s\n") % pb)
1616 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1617 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1617 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1618 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1618 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1619 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1619 pa.distance(pb), rel))
1620 pa.distance(pb), rel))
1620
1621
1621 @command('debugrebuilddirstate|debugrebuildstate',
1622 @command('debugrebuilddirstate|debugrebuildstate',
1622 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1623 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1623 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1624 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1624 'the working copy parent')),
1625 'the working copy parent')),
1625 ],
1626 ],
1626 _('[-r REV]'))
1627 _('[-r REV]'))
1627 def debugrebuilddirstate(ui, repo, rev, **opts):
1628 def debugrebuilddirstate(ui, repo, rev, **opts):
1628 """rebuild the dirstate as it would look like for the given revision
1629 """rebuild the dirstate as it would look like for the given revision
1629
1630
1630 If no revision is specified the first current parent will be used.
1631 If no revision is specified the first current parent will be used.
1631
1632
1632 The dirstate will be set to the files of the given revision.
1633 The dirstate will be set to the files of the given revision.
1633 The actual working directory content or existing dirstate
1634 The actual working directory content or existing dirstate
1634 information such as adds or removes is not considered.
1635 information such as adds or removes is not considered.
1635
1636
1636 ``minimal`` will only rebuild the dirstate status for files that claim to be
1637 ``minimal`` will only rebuild the dirstate status for files that claim to be
1637 tracked but are not in the parent manifest, or that exist in the parent
1638 tracked but are not in the parent manifest, or that exist in the parent
1638 manifest but are not in the dirstate. It will not change adds, removes, or
1639 manifest but are not in the dirstate. It will not change adds, removes, or
1639 modified files that are in the working copy parent.
1640 modified files that are in the working copy parent.
1640
1641
1641 One use of this command is to make the next :hg:`status` invocation
1642 One use of this command is to make the next :hg:`status` invocation
1642 check the actual file content.
1643 check the actual file content.
1643 """
1644 """
1644 ctx = scmutil.revsingle(repo, rev)
1645 ctx = scmutil.revsingle(repo, rev)
1645 with repo.wlock():
1646 with repo.wlock():
1646 dirstate = repo.dirstate
1647 dirstate = repo.dirstate
1647 changedfiles = None
1648 changedfiles = None
1648 # See command doc for what minimal does.
1649 # See command doc for what minimal does.
1649 if opts.get('minimal'):
1650 if opts.get('minimal'):
1650 manifestfiles = set(ctx.manifest().keys())
1651 manifestfiles = set(ctx.manifest().keys())
1651 dirstatefiles = set(dirstate)
1652 dirstatefiles = set(dirstate)
1652 manifestonly = manifestfiles - dirstatefiles
1653 manifestonly = manifestfiles - dirstatefiles
1653 dsonly = dirstatefiles - manifestfiles
1654 dsonly = dirstatefiles - manifestfiles
1654 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1655 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1655 changedfiles = manifestonly | dsnotadded
1656 changedfiles = manifestonly | dsnotadded
1656
1657
1657 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1658 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1658
1659
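# Illustrative usage of debugrebuilddirstate (a sketch added for clarity; the
# flags come from the option table above):
#
#   $ hg debugrebuilddirstate             # reset the dirstate to the working parent's manifest
#   $ hg debugrebuilddirstate -r .^       # reset it to the parent of the working parent instead
#   $ hg debugrebuilddirstate --minimal   # only repair entries that disagree with the parent manifest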
1659 @command('debugrebuildfncache', [], '')
1660 @command('debugrebuildfncache', [], '')
1660 def debugrebuildfncache(ui, repo):
1661 def debugrebuildfncache(ui, repo):
1661 """rebuild the fncache file"""
1662 """rebuild the fncache file"""
1662 repair.rebuildfncache(ui, repo)
1663 repair.rebuildfncache(ui, repo)
1663
1664
1664 @command('debugrename',
1665 @command('debugrename',
1665 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1666 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1666 _('[-r REV] FILE'))
1667 _('[-r REV] FILE'))
1667 def debugrename(ui, repo, file1, *pats, **opts):
1668 def debugrename(ui, repo, file1, *pats, **opts):
1668 """dump rename information"""
1669 """dump rename information"""
1669
1670
1670 ctx = scmutil.revsingle(repo, opts.get('rev'))
1671 ctx = scmutil.revsingle(repo, opts.get('rev'))
1671 m = scmutil.match(ctx, (file1,) + pats, opts)
1672 m = scmutil.match(ctx, (file1,) + pats, opts)
1672 for abs in ctx.walk(m):
1673 for abs in ctx.walk(m):
1673 fctx = ctx[abs]
1674 fctx = ctx[abs]
1674 o = fctx.filelog().renamed(fctx.filenode())
1675 o = fctx.filelog().renamed(fctx.filenode())
1675 rel = m.rel(abs)
1676 rel = m.rel(abs)
1676 if o:
1677 if o:
1677 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1678 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1678 else:
1679 else:
1679 ui.write(_("%s not renamed\n") % rel)
1680 ui.write(_("%s not renamed\n") % rel)
1680
1681
1681 @command('debugrevlog', cmdutil.debugrevlogopts +
1682 @command('debugrevlog', cmdutil.debugrevlogopts +
1682 [('d', 'dump', False, _('dump index data'))],
1683 [('d', 'dump', False, _('dump index data'))],
1683 _('-c|-m|FILE'),
1684 _('-c|-m|FILE'),
1684 optionalrepo=True)
1685 optionalrepo=True)
1685 def debugrevlog(ui, repo, file_=None, **opts):
1686 def debugrevlog(ui, repo, file_=None, **opts):
1686 """show data and statistics about a revlog"""
1687 """show data and statistics about a revlog"""
1687 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1688 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1688
1689
1689 if opts.get("dump"):
1690 if opts.get("dump"):
1690 numrevs = len(r)
1691 numrevs = len(r)
1691 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1692 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1692 " rawsize totalsize compression heads chainlen\n"))
1693 " rawsize totalsize compression heads chainlen\n"))
1693 ts = 0
1694 ts = 0
1694 heads = set()
1695 heads = set()
1695
1696
1696 for rev in xrange(numrevs):
1697 for rev in xrange(numrevs):
1697 dbase = r.deltaparent(rev)
1698 dbase = r.deltaparent(rev)
1698 if dbase == -1:
1699 if dbase == -1:
1699 dbase = rev
1700 dbase = rev
1700 cbase = r.chainbase(rev)
1701 cbase = r.chainbase(rev)
1701 clen = r.chainlen(rev)
1702 clen = r.chainlen(rev)
1702 p1, p2 = r.parentrevs(rev)
1703 p1, p2 = r.parentrevs(rev)
1703 rs = r.rawsize(rev)
1704 rs = r.rawsize(rev)
1704 ts = ts + rs
1705 ts = ts + rs
1705 heads -= set(r.parentrevs(rev))
1706 heads -= set(r.parentrevs(rev))
1706 heads.add(rev)
1707 heads.add(rev)
1707 try:
1708 try:
1708 compression = ts / r.end(rev)
1709 compression = ts / r.end(rev)
1709 except ZeroDivisionError:
1710 except ZeroDivisionError:
1710 compression = 0
1711 compression = 0
1711 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1712 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1712 "%11d %5d %8d\n" %
1713 "%11d %5d %8d\n" %
1713 (rev, p1, p2, r.start(rev), r.end(rev),
1714 (rev, p1, p2, r.start(rev), r.end(rev),
1714 r.start(dbase), r.start(cbase),
1715 r.start(dbase), r.start(cbase),
1715 r.start(p1), r.start(p2),
1716 r.start(p1), r.start(p2),
1716 rs, ts, compression, len(heads), clen))
1717 rs, ts, compression, len(heads), clen))
1717 return 0
1718 return 0
1718
1719
1719 v = r.version
1720 v = r.version
1720 format = v & 0xFFFF
1721 format = v & 0xFFFF
1721 flags = []
1722 flags = []
1722 gdelta = False
1723 gdelta = False
1723 if v & revlog.FLAG_INLINE_DATA:
1724 if v & revlog.FLAG_INLINE_DATA:
1724 flags.append('inline')
1725 flags.append('inline')
1725 if v & revlog.FLAG_GENERALDELTA:
1726 if v & revlog.FLAG_GENERALDELTA:
1726 gdelta = True
1727 gdelta = True
1727 flags.append('generaldelta')
1728 flags.append('generaldelta')
1728 if not flags:
1729 if not flags:
1729 flags = ['(none)']
1730 flags = ['(none)']
1730
1731
1731 nummerges = 0
1732 nummerges = 0
1732 numfull = 0
1733 numfull = 0
1733 numprev = 0
1734 numprev = 0
1734 nump1 = 0
1735 nump1 = 0
1735 nump2 = 0
1736 nump2 = 0
1736 numother = 0
1737 numother = 0
1737 nump1prev = 0
1738 nump1prev = 0
1738 nump2prev = 0
1739 nump2prev = 0
1739 chainlengths = []
1740 chainlengths = []
1740
1741
1741 datasize = [None, 0, 0]
1742 datasize = [None, 0, 0]
1742 fullsize = [None, 0, 0]
1743 fullsize = [None, 0, 0]
1743 deltasize = [None, 0, 0]
1744 deltasize = [None, 0, 0]
1744 chunktypecounts = {}
1745 chunktypecounts = {}
1745 chunktypesizes = {}
1746 chunktypesizes = {}
1746
1747
1747 def addsize(size, l):
1748 def addsize(size, l):
1748 if l[0] is None or size < l[0]:
1749 if l[0] is None or size < l[0]:
1749 l[0] = size
1750 l[0] = size
1750 if size > l[1]:
1751 if size > l[1]:
1751 l[1] = size
1752 l[1] = size
1752 l[2] += size
1753 l[2] += size
1753
1754
1754 numrevs = len(r)
1755 numrevs = len(r)
1755 for rev in xrange(numrevs):
1756 for rev in xrange(numrevs):
1756 p1, p2 = r.parentrevs(rev)
1757 p1, p2 = r.parentrevs(rev)
1757 delta = r.deltaparent(rev)
1758 delta = r.deltaparent(rev)
1758 if format > 0:
1759 if format > 0:
1759 addsize(r.rawsize(rev), datasize)
1760 addsize(r.rawsize(rev), datasize)
1760 if p2 != nullrev:
1761 if p2 != nullrev:
1761 nummerges += 1
1762 nummerges += 1
1762 size = r.length(rev)
1763 size = r.length(rev)
1763 if delta == nullrev:
1764 if delta == nullrev:
1764 chainlengths.append(0)
1765 chainlengths.append(0)
1765 numfull += 1
1766 numfull += 1
1766 addsize(size, fullsize)
1767 addsize(size, fullsize)
1767 else:
1768 else:
1768 chainlengths.append(chainlengths[delta] + 1)
1769 chainlengths.append(chainlengths[delta] + 1)
1769 addsize(size, deltasize)
1770 addsize(size, deltasize)
1770 if delta == rev - 1:
1771 if delta == rev - 1:
1771 numprev += 1
1772 numprev += 1
1772 if delta == p1:
1773 if delta == p1:
1773 nump1prev += 1
1774 nump1prev += 1
1774 elif delta == p2:
1775 elif delta == p2:
1775 nump2prev += 1
1776 nump2prev += 1
1776 elif delta == p1:
1777 elif delta == p1:
1777 nump1 += 1
1778 nump1 += 1
1778 elif delta == p2:
1779 elif delta == p2:
1779 nump2 += 1
1780 nump2 += 1
1780 elif delta != nullrev:
1781 elif delta != nullrev:
1781 numother += 1
1782 numother += 1
1782
1783
1783 # Obtain data on the raw chunks in the revlog.
1784 # Obtain data on the raw chunks in the revlog.
1784 segment = r._getsegmentforrevs(rev, rev)[1]
1785 segment = r._getsegmentforrevs(rev, rev)[1]
1785 if segment:
1786 if segment:
1786 chunktype = segment[0]
1787 chunktype = segment[0]
1787 else:
1788 else:
1788 chunktype = 'empty'
1789 chunktype = 'empty'
1789
1790
1790 if chunktype not in chunktypecounts:
1791 if chunktype not in chunktypecounts:
1791 chunktypecounts[chunktype] = 0
1792 chunktypecounts[chunktype] = 0
1792 chunktypesizes[chunktype] = 0
1793 chunktypesizes[chunktype] = 0
1793
1794
1794 chunktypecounts[chunktype] += 1
1795 chunktypecounts[chunktype] += 1
1795 chunktypesizes[chunktype] += size
1796 chunktypesizes[chunktype] += size
1796
1797
1797 # Adjust size min value for empty cases
1798 # Adjust size min value for empty cases
1798 for size in (datasize, fullsize, deltasize):
1799 for size in (datasize, fullsize, deltasize):
1799 if size[0] is None:
1800 if size[0] is None:
1800 size[0] = 0
1801 size[0] = 0
1801
1802
1802 numdeltas = numrevs - numfull
1803 numdeltas = numrevs - numfull
1803 numoprev = numprev - nump1prev - nump2prev
1804 numoprev = numprev - nump1prev - nump2prev
1804 totalrawsize = datasize[2]
1805 totalrawsize = datasize[2]
1805 datasize[2] /= numrevs
1806 datasize[2] /= numrevs
1806 fulltotal = fullsize[2]
1807 fulltotal = fullsize[2]
1807 fullsize[2] /= numfull
1808 fullsize[2] /= numfull
1808 deltatotal = deltasize[2]
1809 deltatotal = deltasize[2]
1809 if numrevs - numfull > 0:
1810 if numrevs - numfull > 0:
1810 deltasize[2] /= numrevs - numfull
1811 deltasize[2] /= numrevs - numfull
1811 totalsize = fulltotal + deltatotal
1812 totalsize = fulltotal + deltatotal
1812 avgchainlen = sum(chainlengths) / numrevs
1813 avgchainlen = sum(chainlengths) / numrevs
1813 maxchainlen = max(chainlengths)
1814 maxchainlen = max(chainlengths)
1814 compratio = 1
1815 compratio = 1
1815 if totalsize:
1816 if totalsize:
1816 compratio = totalrawsize / totalsize
1817 compratio = totalrawsize / totalsize
1817
1818
1818 basedfmtstr = '%%%dd\n'
1819 basedfmtstr = '%%%dd\n'
1819 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
1820 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
1820
1821
1821 def dfmtstr(max):
1822 def dfmtstr(max):
1822 return basedfmtstr % len(str(max))
1823 return basedfmtstr % len(str(max))
1823 def pcfmtstr(max, padding=0):
1824 def pcfmtstr(max, padding=0):
1824 return basepcfmtstr % (len(str(max)), ' ' * padding)
1825 return basepcfmtstr % (len(str(max)), ' ' * padding)
1825
1826
1826 def pcfmt(value, total):
1827 def pcfmt(value, total):
1827 if total:
1828 if total:
1828 return (value, 100 * float(value) / total)
1829 return (value, 100 * float(value) / total)
1829 else:
1830 else:
1830 return value, 100.0
1831 return value, 100.0
1831
1832
1832 ui.write(('format : %d\n') % format)
1833 ui.write(('format : %d\n') % format)
1833 ui.write(('flags : %s\n') % ', '.join(flags))
1834 ui.write(('flags : %s\n') % ', '.join(flags))
1834
1835
1835 ui.write('\n')
1836 ui.write('\n')
1836 fmt = pcfmtstr(totalsize)
1837 fmt = pcfmtstr(totalsize)
1837 fmt2 = dfmtstr(totalsize)
1838 fmt2 = dfmtstr(totalsize)
1838 ui.write(('revisions : ') + fmt2 % numrevs)
1839 ui.write(('revisions : ') + fmt2 % numrevs)
1839 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
1840 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
1840 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
1841 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
1841 ui.write(('revisions : ') + fmt2 % numrevs)
1842 ui.write(('revisions : ') + fmt2 % numrevs)
1842 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
1843 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
1843 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
1844 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
1844 ui.write(('revision size : ') + fmt2 % totalsize)
1845 ui.write(('revision size : ') + fmt2 % totalsize)
1845 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
1846 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
1846 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
1847 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
1847
1848
1848 def fmtchunktype(chunktype):
1849 def fmtchunktype(chunktype):
1849 if chunktype == 'empty':
1850 if chunktype == 'empty':
1850 return ' %s : ' % chunktype
1851 return ' %s : ' % chunktype
1851 elif chunktype in string.ascii_letters:
1852 elif chunktype in string.ascii_letters:
1852 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
1853 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
1853 else:
1854 else:
1854 return ' 0x%s : ' % hex(chunktype)
1855 return ' 0x%s : ' % hex(chunktype)
1855
1856
1856 ui.write('\n')
1857 ui.write('\n')
1857 ui.write(('chunks : ') + fmt2 % numrevs)
1858 ui.write(('chunks : ') + fmt2 % numrevs)
1858 for chunktype in sorted(chunktypecounts):
1859 for chunktype in sorted(chunktypecounts):
1859 ui.write(fmtchunktype(chunktype))
1860 ui.write(fmtchunktype(chunktype))
1860 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
1861 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
1861 ui.write(('chunks size : ') + fmt2 % totalsize)
1862 ui.write(('chunks size : ') + fmt2 % totalsize)
1862 for chunktype in sorted(chunktypecounts):
1863 for chunktype in sorted(chunktypecounts):
1863 ui.write(fmtchunktype(chunktype))
1864 ui.write(fmtchunktype(chunktype))
1864 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
1865 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
1865
1866
1866 ui.write('\n')
1867 ui.write('\n')
1867 fmt = dfmtstr(max(avgchainlen, compratio))
1868 fmt = dfmtstr(max(avgchainlen, compratio))
1868 ui.write(('avg chain length : ') + fmt % avgchainlen)
1869 ui.write(('avg chain length : ') + fmt % avgchainlen)
1869 ui.write(('max chain length : ') + fmt % maxchainlen)
1870 ui.write(('max chain length : ') + fmt % maxchainlen)
1870 ui.write(('compression ratio : ') + fmt % compratio)
1871 ui.write(('compression ratio : ') + fmt % compratio)
1871
1872
1872 if format > 0:
1873 if format > 0:
1873 ui.write('\n')
1874 ui.write('\n')
1874 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
1875 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
1875 % tuple(datasize))
1876 % tuple(datasize))
1876 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
1877 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
1877 % tuple(fullsize))
1878 % tuple(fullsize))
1878 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
1879 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
1879 % tuple(deltasize))
1880 % tuple(deltasize))
1880
1881
1881 if numdeltas > 0:
1882 if numdeltas > 0:
1882 ui.write('\n')
1883 ui.write('\n')
1883 fmt = pcfmtstr(numdeltas)
1884 fmt = pcfmtstr(numdeltas)
1884 fmt2 = pcfmtstr(numdeltas, 4)
1885 fmt2 = pcfmtstr(numdeltas, 4)
1885 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
1886 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
1886 if numprev > 0:
1887 if numprev > 0:
1887 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
1888 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
1888 numprev))
1889 numprev))
1889 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
1890 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
1890 numprev))
1891 numprev))
1891 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
1892 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
1892 numprev))
1893 numprev))
1893 if gdelta:
1894 if gdelta:
1894 ui.write(('deltas against p1 : ')
1895 ui.write(('deltas against p1 : ')
1895 + fmt % pcfmt(nump1, numdeltas))
1896 + fmt % pcfmt(nump1, numdeltas))
1896 ui.write(('deltas against p2 : ')
1897 ui.write(('deltas against p2 : ')
1897 + fmt % pcfmt(nump2, numdeltas))
1898 + fmt % pcfmt(nump2, numdeltas))
1898 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
1899 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
1899 numdeltas))
1900 numdeltas))
1900
1901
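The statistics printed by debugrevlog are driven by two small helpers visible above: addsize keeps a [min, max, total] accumulator per size class, and pcfmt turns a count into a (value, percentage) pair for the padded format strings. The following standalone sketch is illustrative only, not part of debugcommands.py, and uses made-up sizes to show the same pattern outside of any revlog:

def addsize(size, acc):
    # acc is a [min, max, total] triple, like datasize/fullsize/deltasize above
    if acc[0] is None or size < acc[0]:
        acc[0] = size
    if size > acc[1]:
        acc[1] = size
    acc[2] += size

def pcfmt(value, total):
    # return (value, percentage-of-total), guarding against an empty revlog
    if total:
        return (value, 100 * float(value) / total)
    return (value, 100.0)

deltasize = [None, 0, 0]
for size in (120, 45, 300, 45):       # hypothetical delta chunk sizes
    addsize(size, deltasize)

print('delta size (min/max/avg) : %d / %d / %d'
      % (deltasize[0], deltasize[1], deltasize[2] // 4))
print('deltas against prev      : %d (%5.2f%%)' % pcfmt(3, 4))
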
1901 @command('debugrevspec',
1902 @command('debugrevspec',
1902 [('', 'optimize', None,
1903 [('', 'optimize', None,
1903 _('print parsed tree after optimizing (DEPRECATED)')),
1904 _('print parsed tree after optimizing (DEPRECATED)')),
1904 ('', 'show-revs', True, _('print list of result revisions (default)')),
1905 ('', 'show-revs', True, _('print list of result revisions (default)')),
1905 ('s', 'show-set', None, _('print internal representation of result set')),
1906 ('s', 'show-set', None, _('print internal representation of result set')),
1906 ('p', 'show-stage', [],
1907 ('p', 'show-stage', [],
1907 _('print parsed tree at the given stage'), _('NAME')),
1908 _('print parsed tree at the given stage'), _('NAME')),
1908 ('', 'no-optimized', False, _('evaluate tree without optimization')),
1909 ('', 'no-optimized', False, _('evaluate tree without optimization')),
1909 ('', 'verify-optimized', False, _('verify optimized result')),
1910 ('', 'verify-optimized', False, _('verify optimized result')),
1910 ],
1911 ],
1911 ('REVSPEC'))
1912 ('REVSPEC'))
1912 def debugrevspec(ui, repo, expr, **opts):
1913 def debugrevspec(ui, repo, expr, **opts):
1913 """parse and apply a revision specification
1914 """parse and apply a revision specification
1914
1915
1915 Use -p/--show-stage option to print the parsed tree at the given stages.
1916 Use -p/--show-stage option to print the parsed tree at the given stages.
1916 Use -p all to print tree at every stage.
1917 Use -p all to print tree at every stage.
1917
1918
1918 Use --no-show-revs option with -s or -p to print only the set
1919 Use --no-show-revs option with -s or -p to print only the set
1919 representation or the parsed tree respectively.
1920 representation or the parsed tree respectively.
1920
1921
1921 Use --verify-optimized to compare the optimized result with the unoptimized
1922 Use --verify-optimized to compare the optimized result with the unoptimized
1922 one. Returns 1 if the optimized result differs.
1923 one. Returns 1 if the optimized result differs.
1923 """
1924 """
1924 stages = [
1925 stages = [
1925 ('parsed', lambda tree: tree),
1926 ('parsed', lambda tree: tree),
1926 ('expanded', lambda tree: revsetlang.expandaliases(ui, tree)),
1927 ('expanded', lambda tree: revsetlang.expandaliases(ui, tree)),
1927 ('concatenated', revsetlang.foldconcat),
1928 ('concatenated', revsetlang.foldconcat),
1928 ('analyzed', revsetlang.analyze),
1929 ('analyzed', revsetlang.analyze),
1929 ('optimized', revsetlang.optimize),
1930 ('optimized', revsetlang.optimize),
1930 ]
1931 ]
1931 if opts['no_optimized']:
1932 if opts['no_optimized']:
1932 stages = stages[:-1]
1933 stages = stages[:-1]
1933 if opts['verify_optimized'] and opts['no_optimized']:
1934 if opts['verify_optimized'] and opts['no_optimized']:
1934 raise error.Abort(_('cannot use --verify-optimized with '
1935 raise error.Abort(_('cannot use --verify-optimized with '
1935 '--no-optimized'))
1936 '--no-optimized'))
1936 stagenames = set(n for n, f in stages)
1937 stagenames = set(n for n, f in stages)
1937
1938
1938 showalways = set()
1939 showalways = set()
1939 showchanged = set()
1940 showchanged = set()
1940 if ui.verbose and not opts['show_stage']:
1941 if ui.verbose and not opts['show_stage']:
1941 # show parsed tree by --verbose (deprecated)
1942 # show parsed tree by --verbose (deprecated)
1942 showalways.add('parsed')
1943 showalways.add('parsed')
1943 showchanged.update(['expanded', 'concatenated'])
1944 showchanged.update(['expanded', 'concatenated'])
1944 if opts['optimize']:
1945 if opts['optimize']:
1945 showalways.add('optimized')
1946 showalways.add('optimized')
1946 if opts['show_stage'] and opts['optimize']:
1947 if opts['show_stage'] and opts['optimize']:
1947 raise error.Abort(_('cannot use --optimize with --show-stage'))
1948 raise error.Abort(_('cannot use --optimize with --show-stage'))
1948 if opts['show_stage'] == ['all']:
1949 if opts['show_stage'] == ['all']:
1949 showalways.update(stagenames)
1950 showalways.update(stagenames)
1950 else:
1951 else:
1951 for n in opts['show_stage']:
1952 for n in opts['show_stage']:
1952 if n not in stagenames:
1953 if n not in stagenames:
1953 raise error.Abort(_('invalid stage name: %s') % n)
1954 raise error.Abort(_('invalid stage name: %s') % n)
1954 showalways.update(opts['show_stage'])
1955 showalways.update(opts['show_stage'])
1955
1956
1956 treebystage = {}
1957 treebystage = {}
1957 printedtree = None
1958 printedtree = None
1958 tree = revsetlang.parse(expr, lookup=repo.__contains__)
1959 tree = revsetlang.parse(expr, lookup=repo.__contains__)
1959 for n, f in stages:
1960 for n, f in stages:
1960 treebystage[n] = tree = f(tree)
1961 treebystage[n] = tree = f(tree)
1961 if n in showalways or (n in showchanged and tree != printedtree):
1962 if n in showalways or (n in showchanged and tree != printedtree):
1962 if opts['show_stage'] or n != 'parsed':
1963 if opts['show_stage'] or n != 'parsed':
1963 ui.write(("* %s:\n") % n)
1964 ui.write(("* %s:\n") % n)
1964 ui.write(revsetlang.prettyformat(tree), "\n")
1965 ui.write(revsetlang.prettyformat(tree), "\n")
1965 printedtree = tree
1966 printedtree = tree
1966
1967
1967 if opts['verify_optimized']:
1968 if opts['verify_optimized']:
1968 arevs = revset.makematcher(treebystage['analyzed'])(repo)
1969 arevs = revset.makematcher(treebystage['analyzed'])(repo)
1969 brevs = revset.makematcher(treebystage['optimized'])(repo)
1970 brevs = revset.makematcher(treebystage['optimized'])(repo)
1970 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
1971 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
1971 ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
1972 ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
1972 ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
1973 ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
1973 arevs = list(arevs)
1974 arevs = list(arevs)
1974 brevs = list(brevs)
1975 brevs = list(brevs)
1975 if arevs == brevs:
1976 if arevs == brevs:
1976 return 0
1977 return 0
1977 ui.write(('--- analyzed\n'), label='diff.file_a')
1978 ui.write(('--- analyzed\n'), label='diff.file_a')
1978 ui.write(('+++ optimized\n'), label='diff.file_b')
1979 ui.write(('+++ optimized\n'), label='diff.file_b')
1979 sm = difflib.SequenceMatcher(None, arevs, brevs)
1980 sm = difflib.SequenceMatcher(None, arevs, brevs)
1980 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1981 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1981 if tag in ('delete', 'replace'):
1982 if tag in ('delete', 'replace'):
1982 for c in arevs[alo:ahi]:
1983 for c in arevs[alo:ahi]:
1983 ui.write('-%s\n' % c, label='diff.deleted')
1984 ui.write('-%s\n' % c, label='diff.deleted')
1984 if tag in ('insert', 'replace'):
1985 if tag in ('insert', 'replace'):
1985 for c in brevs[blo:bhi]:
1986 for c in brevs[blo:bhi]:
1986 ui.write('+%s\n' % c, label='diff.inserted')
1987 ui.write('+%s\n' % c, label='diff.inserted')
1987 if tag == 'equal':
1988 if tag == 'equal':
1988 for c in arevs[alo:ahi]:
1989 for c in arevs[alo:ahi]:
1989 ui.write(' %s\n' % c)
1990 ui.write(' %s\n' % c)
1990 return 1
1991 return 1
1991
1992
1992 func = revset.makematcher(tree)
1993 func = revset.makematcher(tree)
1993 revs = func(repo)
1994 revs = func(repo)
1994 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
1995 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
1995 ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
1996 ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
1996 if not opts['show_revs']:
1997 if not opts['show_revs']:
1997 return
1998 return
1998 for c in revs:
1999 for c in revs:
1999 ui.write("%s\n" % c)
2000 ui.write("%s\n" % c)
2000
2001
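debugrevspec folds the parsed expression through a fixed list of (name, transform) stages and prints the tree at whichever stages were requested (or, in --verbose mode, at stages where it changed). A minimal sketch of that folding pattern, with toy string transforms standing in for the revsetlang functions and not part of the real command, could look like this:

stages = [
    ('parsed', lambda s: s),
    ('expanded', lambda s: s.replace('tip', 'rev(42)')),   # toy transform
    ('optimized', lambda s: s.strip()),
]

def runstages(expr, showalways, showchanged=()):
    printed = None
    for name, transform in stages:
        expr = transform(expr)
        if name in showalways or (name in showchanged and expr != printed):
            print('* %s:' % name)
            print('  %r' % expr)
            printed = expr
    return expr

# print only the final form, in the spirit of -p optimized
runstages('  ::tip  ', showalways={'optimized'})
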
2001 @command('debugsetparents', [], _('REV1 [REV2]'))
2002 @command('debugsetparents', [], _('REV1 [REV2]'))
2002 def debugsetparents(ui, repo, rev1, rev2=None):
2003 def debugsetparents(ui, repo, rev1, rev2=None):
2003 """manually set the parents of the current working directory
2004 """manually set the parents of the current working directory
2004
2005
2005 This is useful for writing repository conversion tools, but should
2006 This is useful for writing repository conversion tools, but should
2006 be used with care. For example, neither the working directory nor the
2007 be used with care. For example, neither the working directory nor the
2007 dirstate is updated, so file status may be incorrect after running this
2008 dirstate is updated, so file status may be incorrect after running this
2008 command.
2009 command.
2009
2010
2010 Returns 0 on success.
2011 Returns 0 on success.
2011 """
2012 """
2012
2013
2013 r1 = scmutil.revsingle(repo, rev1).node()
2014 r1 = scmutil.revsingle(repo, rev1).node()
2014 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2015 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2015
2016
2016 with repo.wlock():
2017 with repo.wlock():
2017 repo.setparents(r1, r2)
2018 repo.setparents(r1, r2)
2018
2019
2019 @command('debugsub',
2020 @command('debugsub',
2020 [('r', 'rev', '',
2021 [('r', 'rev', '',
2021 _('revision to check'), _('REV'))],
2022 _('revision to check'), _('REV'))],
2022 _('[-r REV] [REV]'))
2023 _('[-r REV] [REV]'))
2023 def debugsub(ui, repo, rev=None):
2024 def debugsub(ui, repo, rev=None):
2024 ctx = scmutil.revsingle(repo, rev, None)
2025 ctx = scmutil.revsingle(repo, rev, None)
2025 for k, v in sorted(ctx.substate.items()):
2026 for k, v in sorted(ctx.substate.items()):
2026 ui.write(('path %s\n') % k)
2027 ui.write(('path %s\n') % k)
2027 ui.write((' source %s\n') % v[0])
2028 ui.write((' source %s\n') % v[0])
2028 ui.write((' revision %s\n') % v[1])
2029 ui.write((' revision %s\n') % v[1])
2029
2030
2030 @command('debugsuccessorssets',
2031 @command('debugsuccessorssets',
2031 [],
2032 [],
2032 _('[REV]'))
2033 _('[REV]'))
2033 def debugsuccessorssets(ui, repo, *revs):
2034 def debugsuccessorssets(ui, repo, *revs):
2034 """show set of successors for revision
2035 """show set of successors for revision
2035
2036
2036 A successors set of changeset A is a consistent group of revisions that
2037 A successors set of changeset A is a consistent group of revisions that
2037 succeed A. It contains non-obsolete changesets only.
2038 succeed A. It contains non-obsolete changesets only.
2038
2039
2039 In most cases a changeset A has a single successors set containing a single
2040 In most cases a changeset A has a single successors set containing a single
2040 successor (changeset A replaced by A').
2041 successor (changeset A replaced by A').
2041
2042
2042 A changeset that is made obsolete with no successors is called "pruned".
2043 A changeset that is made obsolete with no successors is called "pruned".
2043 Such changesets have no successors sets at all.
2044 Such changesets have no successors sets at all.
2044
2045
2045 A changeset that has been "split" will have a successors set containing
2046 A changeset that has been "split" will have a successors set containing
2046 more than one successor.
2047 more than one successor.
2047
2048
2048 A changeset that has been rewritten in multiple different ways is called
2049 A changeset that has been rewritten in multiple different ways is called
2049 "divergent". Such changesets have multiple successor sets (each of which
2050 "divergent". Such changesets have multiple successor sets (each of which
2050 may also be split, i.e. have multiple successors).
2051 may also be split, i.e. have multiple successors).
2051
2052
2052 Results are displayed as follows::
2053 Results are displayed as follows::
2053
2054
2054 <rev1>
2055 <rev1>
2055 <successors-1A>
2056 <successors-1A>
2056 <rev2>
2057 <rev2>
2057 <successors-2A>
2058 <successors-2A>
2058 <successors-2B1> <successors-2B2> <successors-2B3>
2059 <successors-2B1> <successors-2B2> <successors-2B3>
2059
2060
2060 Here rev2 has two possible (i.e. divergent) successors sets. The first
2061 Here rev2 has two possible (i.e. divergent) successors sets. The first
2061 holds one element, whereas the second holds three (i.e. the changeset has
2062 holds one element, whereas the second holds three (i.e. the changeset has
2062 been split).
2063 been split).
2063 """
2064 """
2064 # passed to successorssets caching computation from one call to another
2065 # passed to successorssets caching computation from one call to another
2065 cache = {}
2066 cache = {}
2066 ctx2str = str
2067 ctx2str = str
2067 node2str = short
2068 node2str = short
2068 if ui.debug():
2069 if ui.debug():
2069 def ctx2str(ctx):
2070 def ctx2str(ctx):
2070 return ctx.hex()
2071 return ctx.hex()
2071 node2str = hex
2072 node2str = hex
2072 for rev in scmutil.revrange(repo, revs):
2073 for rev in scmutil.revrange(repo, revs):
2073 ctx = repo[rev]
2074 ctx = repo[rev]
2074 ui.write('%s\n'% ctx2str(ctx))
2075 ui.write('%s\n'% ctx2str(ctx))
2075 for succsset in obsolete.successorssets(repo, ctx.node(), cache):
2076 for succsset in obsolete.successorssets(repo, ctx.node(), cache):
2076 if succsset:
2077 if succsset:
2077 ui.write(' ')
2078 ui.write(' ')
2078 ui.write(node2str(succsset[0]))
2079 ui.write(node2str(succsset[0]))
2079 for node in succsset[1:]:
2080 for node in succsset[1:]:
2080 ui.write(' ')
2081 ui.write(' ')
2081 ui.write(node2str(node))
2082 ui.write(node2str(node))
2082 ui.write('\n')
2083 ui.write('\n')
2083
2084
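The output layout described in the docstring above — one header line per revision, then one indented line per successors set — is easy to see with plain data standing in for obsolete.successorssets(). This is an illustrative sketch using the docstring's placeholder names, not the command itself:

# hypothetical successors sets, keyed by revision, with placeholder names
# instead of real changeset hashes
successorssets = {
    '<rev1>': [['<successors-1A>']],
    '<rev2>': [['<successors-2A>'],
               ['<successors-2B1>', '<successors-2B2>', '<successors-2B3>']],
}

for rev in sorted(successorssets):
    print(rev)
    for succset in successorssets[rev]:
        print('    ' + ' '.join(succset))
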
2084 @command('debugtemplate',
2085 @command('debugtemplate',
2085 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2086 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2086 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2087 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2087 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2088 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2088 optionalrepo=True)
2089 optionalrepo=True)
2089 def debugtemplate(ui, repo, tmpl, **opts):
2090 def debugtemplate(ui, repo, tmpl, **opts):
2090 """parse and apply a template
2091 """parse and apply a template
2091
2092
2092 If -r/--rev is given, the template is processed as a log template and
2093 If -r/--rev is given, the template is processed as a log template and
2093 applied to the given changesets. Otherwise, it is processed as a generic
2094 applied to the given changesets. Otherwise, it is processed as a generic
2094 template.
2095 template.
2095
2096
2096 Use --verbose to print the parsed tree.
2097 Use --verbose to print the parsed tree.
2097 """
2098 """
2098 revs = None
2099 revs = None
2099 if opts['rev']:
2100 if opts['rev']:
2100 if repo is None:
2101 if repo is None:
2101 raise error.RepoError(_('there is no Mercurial repository here '
2102 raise error.RepoError(_('there is no Mercurial repository here '
2102 '(.hg not found)'))
2103 '(.hg not found)'))
2103 revs = scmutil.revrange(repo, opts['rev'])
2104 revs = scmutil.revrange(repo, opts['rev'])
2104
2105
2105 props = {}
2106 props = {}
2106 for d in opts['define']:
2107 for d in opts['define']:
2107 try:
2108 try:
2108 k, v = (e.strip() for e in d.split('=', 1))
2109 k, v = (e.strip() for e in d.split('=', 1))
2109 if not k or k == 'ui':
2110 if not k or k == 'ui':
2110 raise ValueError
2111 raise ValueError
2111 props[k] = v
2112 props[k] = v
2112 except ValueError:
2113 except ValueError:
2113 raise error.Abort(_('malformed keyword definition: %s') % d)
2114 raise error.Abort(_('malformed keyword definition: %s') % d)
2114
2115
2115 if ui.verbose:
2116 if ui.verbose:
2116 aliases = ui.configitems('templatealias')
2117 aliases = ui.configitems('templatealias')
2117 tree = templater.parse(tmpl)
2118 tree = templater.parse(tmpl)
2118 ui.note(templater.prettyformat(tree), '\n')
2119 ui.note(templater.prettyformat(tree), '\n')
2119 newtree = templater.expandaliases(tree, aliases)
2120 newtree = templater.expandaliases(tree, aliases)
2120 if newtree != tree:
2121 if newtree != tree:
2121 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2122 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2122
2123
2123 if revs is None:
2124 if revs is None:
2124 t = formatter.maketemplater(ui, tmpl)
2125 t = formatter.maketemplater(ui, tmpl)
2125 props['ui'] = ui
2126 props['ui'] = ui
2126 ui.write(t.render(props))
2127 ui.write(t.render(props))
2127 else:
2128 else:
2128 displayer = cmdutil.makelogtemplater(ui, repo, tmpl)
2129 displayer = cmdutil.makelogtemplater(ui, repo, tmpl)
2129 for r in revs:
2130 for r in revs:
2130 displayer.show(repo[r], **props)
2131 displayer.show(repo[r], **props)
2131 displayer.close()
2132 displayer.close()
2132
2133
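The -D/--define handling above is a small KEY=VALUE parser: split on the first '=', strip whitespace, and reject empty keys and the reserved 'ui' key. A self-contained sketch of the same validation, for illustration only, is:

def parsedefines(defines):
    props = {}
    for d in defines:
        try:
            k, v = (e.strip() for e in d.split('=', 1))
            if not k or k == 'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise SystemExit('malformed keyword definition: %s' % d)
    return props

# prints {'author': 'alice', 'topic': 'testing'}
print(parsedefines(['author=alice', 'topic = testing']))
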
2133 @command('debugupdatecaches', [])
2134 @command('debugupdatecaches', [])
2134 def debugupdatecaches(ui, repo, *pats, **opts):
2135 def debugupdatecaches(ui, repo, *pats, **opts):
2135 """warm all known caches in the repository"""
2136 """warm all known caches in the repository"""
2136 with repo.wlock():
2137 with repo.wlock():
2137 with repo.lock():
2138 with repo.lock():
2138 repo.updatecaches()
2139 repo.updatecaches()
2139
2140
2140 @command('debugupgraderepo', [
2141 @command('debugupgraderepo', [
2141 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2142 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2142 ('', 'run', False, _('performs an upgrade')),
2143 ('', 'run', False, _('performs an upgrade')),
2143 ])
2144 ])
2144 def debugupgraderepo(ui, repo, run=False, optimize=None):
2145 def debugupgraderepo(ui, repo, run=False, optimize=None):
2145 """upgrade a repository to use different features
2146 """upgrade a repository to use different features
2146
2147
2147 If no arguments are specified, the repository is evaluated for upgrade
2148 If no arguments are specified, the repository is evaluated for upgrade
2148 and a list of problems and potential optimizations is printed.
2149 and a list of problems and potential optimizations is printed.
2149
2150
2150 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2151 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2151 can be influenced via additional arguments. More details will be provided
2152 can be influenced via additional arguments. More details will be provided
2152 by the command output when run without ``--run``.
2153 by the command output when run without ``--run``.
2153
2154
2154 During the upgrade, the repository will be locked and no writes will be
2155 During the upgrade, the repository will be locked and no writes will be
2155 allowed.
2156 allowed.
2156
2157
2157 At the end of the upgrade, the repository may not be readable while new
2158 At the end of the upgrade, the repository may not be readable while new
2158 repository data is swapped in. This window will be as long as it takes to
2159 repository data is swapped in. This window will be as long as it takes to
2159 rename some directories inside the ``.hg`` directory. On most machines, this
2160 rename some directories inside the ``.hg`` directory. On most machines, this
2160 should complete almost instantaneously and the chances of a consumer being
2161 should complete almost instantaneously and the chances of a consumer being
2161 unable to access the repository should be low.
2162 unable to access the repository should be low.
2162 """
2163 """
2163 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2164 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2164
2165
2165 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2166 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2166 inferrepo=True)
2167 inferrepo=True)
2167 def debugwalk(ui, repo, *pats, **opts):
2168 def debugwalk(ui, repo, *pats, **opts):
2168 """show how files match on given patterns"""
2169 """show how files match on given patterns"""
2169 m = scmutil.match(repo[None], pats, opts)
2170 m = scmutil.match(repo[None], pats, opts)
2170 ui.write(('matcher: %r\n' % m))
2171 ui.write(('matcher: %r\n' % m))
2171 items = list(repo[None].walk(m))
2172 items = list(repo[None].walk(m))
2172 if not items:
2173 if not items:
2173 return
2174 return
2174 f = lambda fn: fn
2175 f = lambda fn: fn
2175 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2176 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2176 f = lambda fn: util.normpath(fn)
2177 f = lambda fn: util.normpath(fn)
2177 fmt = 'f %%-%ds %%-%ds %%s' % (
2178 fmt = 'f %%-%ds %%-%ds %%s' % (
2178 max([len(abs) for abs in items]),
2179 max([len(abs) for abs in items]),
2179 max([len(m.rel(abs)) for abs in items]))
2180 max([len(m.rel(abs)) for abs in items]))
2180 for abs in items:
2181 for abs in items:
2181 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2182 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2182 ui.write("%s\n" % line.rstrip())
2183 ui.write("%s\n" % line.rstrip())
2183
2184
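debugwalk sizes its two path columns from the longest absolute and relative names before printing, which is why the format string is built at run time. A standalone sketch of that alignment trick, with hypothetical paths in place of the matcher results:

items = ['dir/a.txt', 'dir/sub/long-name.py', 'README']      # absolute paths
rels = {'dir/a.txt': 'a.txt',
        'dir/sub/long-name.py': 'sub/long-name.py',
        'README': '../README'}                                # relative paths
exact = {'README'}                                            # exact matches

fmt = 'f %%-%ds %%-%ds %%s' % (
    max(len(name) for name in items),
    max(len(rels[name]) for name in items))
for name in items:
    line = fmt % (name, rels[name], 'exact' if name in exact else '')
    print(line.rstrip())
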
2184 @command('debugwireargs',
2185 @command('debugwireargs',
2185 [('', 'three', '', 'three'),
2186 [('', 'three', '', 'three'),
2186 ('', 'four', '', 'four'),
2187 ('', 'four', '', 'four'),
2187 ('', 'five', '', 'five'),
2188 ('', 'five', '', 'five'),
2188 ] + cmdutil.remoteopts,
2189 ] + cmdutil.remoteopts,
2189 _('REPO [OPTIONS]... [ONE [TWO]]'),
2190 _('REPO [OPTIONS]... [ONE [TWO]]'),
2190 norepo=True)
2191 norepo=True)
2191 def debugwireargs(ui, repopath, *vals, **opts):
2192 def debugwireargs(ui, repopath, *vals, **opts):
2192 repo = hg.peer(ui, opts, repopath)
2193 repo = hg.peer(ui, opts, repopath)
2193 for opt in cmdutil.remoteopts:
2194 for opt in cmdutil.remoteopts:
2194 del opts[opt[1]]
2195 del opts[opt[1]]
2195 args = {}
2196 args = {}
2196 for k, v in opts.iteritems():
2197 for k, v in opts.iteritems():
2197 if v:
2198 if v:
2198 args[k] = v
2199 args[k] = v
2199 # run twice to check that we don't mess up the stream for the next command
2200 # run twice to check that we don't mess up the stream for the next command
2200 res1 = repo.debugwireargs(*vals, **args)
2201 res1 = repo.debugwireargs(*vals, **args)
2201 res2 = repo.debugwireargs(*vals, **args)
2202 res2 = repo.debugwireargs(*vals, **args)
2202 ui.write("%s\n" % res1)
2203 ui.write("%s\n" % res1)
2203 if res1 != res2:
2204 if res1 != res2:
2204 ui.warn("%s\n" % res2)
2205 ui.warn("%s\n" % res2)