py3: use pycompat.bytestr() to convert str to bytes
Pulkit Goyal
r33107:e88fdec9 default
@@ -1,2244 +1,2244 @@
# debugcommands.py - command processing for debug* commands
#
# Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import difflib
import errno
import operator
import os
import random
import socket
import string
import sys
import tempfile
import time

from .i18n import _
from .node import (
    bin,
    hex,
    nullhex,
    nullid,
    nullrev,
    short,
)
from . import (
    bundle2,
    changegroup,
    cmdutil,
    color,
    context,
    dagparser,
    dagutil,
    encoding,
    error,
    exchange,
    extensions,
    filemerge,
    fileset,
    formatter,
    hg,
    localrepo,
    lock as lockmod,
    merge as mergemod,
    obsolete,
    phases,
    policy,
    pvec,
    pycompat,
    registrar,
    repair,
    revlog,
    revset,
    revsetlang,
    scmutil,
    setdiscovery,
    simplemerge,
    smartset,
    sslutil,
    streamclone,
    templater,
    treediscovery,
    upgrade,
    util,
    vfs as vfsmod,
)

release = lockmod.release

command = registrar.command()

@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    if len(args) == 3:
        index, rev1, rev2 = args
        r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
        lookup = r.lookup
    elif len(args) == 2:
        if not repo:
            raise error.Abort(_('there is no Mercurial repository here '
                                '(.hg not found)'))
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_('either two or three arguments required'))
    a = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write('%d:%s\n' % (r.rev(a), hex(a)))

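# Illustrative usage of debugancestor above (the revision numbers are
# placeholders): "hg debugancestor 10 12" prints the common ancestor of
# revisions 10 and 12 as "<rev>:<hex nodeid>"; with three arguments the first
# names a revlog index file (e.g. one under .hg/store/) to query instead of
# the changelog.
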
@command('debugapplystreamclonebundle', [], 'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    f = hg.openpath(ui, fname)
    gen = exchange.readbundle(ui, f, fname)
    gen.apply(repo)

@command('debugbuilddag',
    [('m', 'mergeable-file', None, _('add single file mergeable changes')),
    ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
    ('n', 'new-file', None, _('add new file at each rev'))],
    _('[OPTION]... [TEXT]'))
def debugbuilddag(ui, repo, text=None,
                  mergeable_file=False,
                  overwritten_file=False,
                  new_file=False):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_("reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_('repository is not empty'))

    # determine number of revs in DAG
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == 'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
        initialmergedlines.append("")

    tags = []

    wlock = lock = tr = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        tr = repo.transaction("builddag")

        at = -1
        atbranch = 'default'
        nodeids = []
        id = 0
        ui.progress(_('building'), id, unit=_('revisions'), total=total)
        for type, data in dagparser.parsedag(text):
            if type == 'n':
                ui.note(('node %s\n' % str(data)))
                id, ps = data

                files = []
                fctxs = {}

                p2 = None
                if mergeable_file:
                    fn = "mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [x[fn].data() for x in (pa, p1,
                                                                     p2)]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append("")
                    elif at > 0:
                        ml = p1[fn].data().split("\n")
                    else:
                        ml = initialmergedlines
                    ml[id * linesperrev] += " r%i" % id
                    mergedtext = "\n".join(ml)
                    files.append(fn)
                    fctxs[fn] = context.memfilectx(repo, fn, mergedtext)

                if overwritten_file:
                    fn = "of"
                    files.append(fn)
                    fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)

                if new_file:
                    fn = "nf%i" % id
                    files.append(fn)
                    fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith("nf"):
                                files.append(fn)
                                fctxs[fn] = p2[fn]

                def fctxfn(repo, cx, path):
                    return fctxs.get(path)

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
                                    date=(id, 0),
                                    user="debugbuilddag",
                                    extra={'branch': atbranch})
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == 'l':
                id, name = data
                ui.note(('tag %s\n' % name))
                tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == 'a':
                ui.note(('branch %s\n' % data))
                atbranch = data
            ui.progress(_('building'), id, unit=_('revisions'), total=total)
        tr.close()

        if tags:
            repo.vfs.write("localtags", "".join(tags))
    finally:
        ui.progress(_('building'), None)
        release(tr, lock, wlock)

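# Illustrative DAG text for debugbuilddag above, built from the elements
# described in its docstring (a sketch, not output of a real run):
#
#   hg init dagdemo && cd dagdemo
#   hg debugbuilddag '+3 :mainline $ +2 *mainline'
#
# "+3" makes three linear revisions, ":mainline" tags the last of them,
# "$ +2" starts a second, unrelated two-revision run from the null parent,
# and "*mainline" forks one more head off the tagged revision.
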
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    indent_string = ' ' * indent
    if all:
        ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
                 % indent_string)

        def showchunks(named):
            ui.write("\n%s%s\n" % (indent_string, named))
            chain = None
            for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
                node = chunkdata['node']
                p1 = chunkdata['p1']
                p2 = chunkdata['p2']
                cs = chunkdata['cs']
                deltabase = chunkdata['deltabase']
                delta = chunkdata['delta']
                ui.write("%s%s %s %s %s %s %s\n" %
                         (indent_string, hex(node), hex(p1), hex(p2),
                          hex(cs), hex(deltabase), len(delta)))
                chain = node

        chunkdata = gen.changelogheader()
        showchunks("changelog")
        chunkdata = gen.manifestheader()
        showchunks("manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata['filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_('use debugbundle2 for this file'))
        chunkdata = gen.changelogheader()
        chain = None
        for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
            node = chunkdata['node']
            ui.write("%s%s\n" % (indent_string, hex(node)))
            chain = node

def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = ' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        msg = "%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = "%sversion: %s (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        fm = ui.formatter('debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsolete.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()

def _debugphaseheads(ui, data, indent=0):
    """display version and markers contained in 'data'"""
    indent_string = ' ' * indent
    headsbyphase = bundle2._readphaseheads(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(indent_string)
            ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))

def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_('not a bundle2 file'))
    ui.write(('Stream params: %s\n' % repr(gen.params)))
    parttypes = opts.get(r'part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        ui.write('%s -- %r\n' % (part.type, repr(part.params)))
        if part.type == 'changegroup':
            version = part.params.get('version', '01')
            cg = changegroup.getunbundler(version, part, 'UN')
            _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == 'obsmarkers':
            _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == 'phase-heads':
            _debugphaseheads(ui, part, indent=4)

@command('debugbundle',
        [('a', 'all', None, _('show all details')),
         ('', 'part-type', [], _('show only the named part type')),
         ('', 'spec', None, _('print the bundlespec of the bundle'))],
        _('FILE'),
        norepo=True)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            spec = exchange.getbundlespec(ui, f)
            ui.write('%s\n' % spec)
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)

@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        error = _(".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(error)

@command('debugcolor',
        [('', 'style', None, _('show all configured styles'))],
        'hg debugcolor')
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.write(('color mode: %s\n') % ui._colormode)
    if opts.get(r'style'):
        return _debugdisplaystyle(ui)
    else:
        return _debugdisplaycolor(ui)

def _debugdisplaycolor(ui):
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for k, v in ui.configitems('color'):
            if k.startswith('color.'):
                ui._styles[k] = k[6:]
            elif k.startswith('terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_('available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(),
                   key=lambda i: ('_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(('%s\n') % colorname, label=label)

def _debugdisplaystyle(ui):
    ui.write(_('available style:\n'))
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write('%s' % label, label=label)
        if effects:
            # 50
            ui.write(': ')
            ui.write(' ' * (max(0, width - len(label))))
            ui.write(', '.join(ui.label(e, e) for e in effects.split()))
        ui.write('\n')

@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(_('(warning: stream clone bundle will contain secret '
                  'revisions)\n'))

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))

@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
    ('b', 'branches', None, _('annotate with branch names')),
    ('', 'dots', None, _('use dots for runs')),
    ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get(r'spaces')
    dots = opts.get(r'dots')
    if file_:
        rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                             file_)
        revs = set((int(r) for r in revs))
        def events():
            for r in rlog:
                yield 'n', (r, list(p for p in rlog.parentrevs(r)
                                    if p != -1))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        cl = repo.changelog
        tags = opts.get(r'tags')
        branches = opts.get(r'branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            b = "default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(p for p in cl.parentrevs(r)
                                    if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
        ui.write("\n")

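# Illustrative usage of debugdag above: "hg debugdag -t -b" prints the current
# repository's changelog in the same compact text language that debugbuilddag
# consumes, using local tags as labels (-t) and annotating branch changes
# (-b); passing a revlog index file and revision numbers dumps that revlog's
# DAG instead.
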
@command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
    try:
        ui.write(r.revision(r.lookup(rev), raw=True))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)

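# Illustrative usage of debugdata above: "hg debugdata -c 0" dumps the raw
# stored text of changelog revision 0, "hg debugdata -m 0" the corresponding
# manifest revision, and "hg debugdata FILE REV" the given revision of a
# tracked file's filelog (FILE and REV are placeholders).
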
@command('debugdate',
    [('e', 'extended', None, _('try extended date formats'))],
    _('[-e] DATE [RANGE]'),
    norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts[r"extended"]:
        d = util.parsedate(date, util.extendeddateformats)
    else:
        d = util.parsedate(date)
    ui.write(("internal: %s %s\n") % d)
    ui.write(("standard: %s\n") % util.datestr(d))
    if range:
        m = util.matchdate(range)
        ui.write(("match: %s\n") % m(d[0]))

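# Illustrative usage of debugdate above: "hg debugdate '2017-06-27 12:30:00'"
# prints the parsed value both as Mercurial's internal "<unixtime> <tzoffset>"
# pair and in the standard date format; with -e the extended formats in
# util.extendeddateformats (such as a bare year) are also tried.
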
@command('debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                  (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                  base of delta chain to end of this revision; a measurement
                  of how much extra data we need to read/seek across to read
                  the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                  how much unrelated data is needed to load this delta chain
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    generaldelta = r.version & revlog.FLAG_GENERALDELTA

    def revinfo(rev):
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            if e[3] == e[5]:
                deltatype = 'p1'
            elif e[3] == e[6]:
                deltatype = 'p2'
            elif e[3] == rev - 1:
                deltatype = 'prev'
            elif e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            if e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    fm.plain('    rev  chain# chainlen     prev   delta       '
             'size    rawsize  chainsize     ratio   lindist extradist '
             'extraratio\n')

    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = r.start(chainbase)
        revstart = r.start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        chainratio = float(chainsize) / float(uncomp)
        extraratio = float(extradist) / float(chainsize)

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f\n',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)

    fm.end()

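# Illustrative usage of debugdeltachain above: the tabular output can be
# reduced to selected keywords through the formatter, e.g.
#   hg debugdeltachain -m -T '{rev} {chainid} {chainlen} {deltatype}\n'
# which walks the manifest revlog and prints only those fields per revision.
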
@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime')),
    ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = opts.get(r'nodates')
    datesort = opts.get(r'datesort')

    timestr = ""
    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        if ent[3] == -1:
            timestr = 'unset               '
        elif nodates:
            timestr = 'set                 '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
                                    time.localtime(ent[3]))
        if ent[1] & 0o20000:
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))

@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
    ('', 'nonheads', None,
     _('use old-style discovery with non-heads included')),
    ] + cmdutil.remoteopts,
    _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
                                      opts.get('branch'))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(localheads, remoteheads, remote=remote):
        if opts.get('old'):
            if localheads:
                raise error.Abort('cannot use localheads with old style '
                                  'discovery')
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))
                dag = dagutil.revlogdag(repo.changelog)
                all = dag.ancestorset(dag.internalizeall(common))
                common = dag.externalizeall(dag.headsetofconnecteds(all))
        else:
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
        common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
        if lheads <= common:
            ui.write(("local is subset\n"))
        elif rheads <= common:
            ui.write(("remote is subset\n"))

    serverlogs = opts.get('serverlog')
    if serverlogs:
        for filename in serverlogs:
            with open(filename, 'r') as logfile:
                line = logfile.readline()
                while line:
                    parts = line.strip().split(';')
                    op = parts[1]
                    if op == 'cg':
                        pass
                    elif op == 'cgss':
                        doit(parts[2].split(' '), parts[3].split(' '))
                    elif op == 'unb':
                        doit(parts[3].split(' '), parts[2].split(' '))
                    line = logfile.readline()
    else:
        remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
                                                 opts.get('remote_head'))
        localrevs = opts.get('local_head')
        doit(localrevs, remoterevs)

@command('debugextensions', cmdutil.formatteropts, [], norepo=True)
def debugextensions(ui, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter('debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = pycompat.fsencode(extmod.__file__)
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', '').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write('name', '%s\n', extname)
        else:
            fm.write('name', '%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain('\n')
            elif not exttestedwith:
                fm.plain(_(' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(' (%s!)\n' % lasttestedversion)

        fm.condwrite(ui.verbose and extsource, 'source',
                     _('  location: %s\n'), extsource or "")

        if ui.verbose:
            fm.plain(_('  bundled: %s\n') % ['no', 'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
                     _('  tested with: %s\n'),
                     fm.formatlist(exttestedwith, name='ver'))

        fm.condwrite(ui.verbose and extbuglink, 'buglink',
                     _('  bug reporting: %s\n'), extbuglink or "")

    fm.end()

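# Illustrative usage of debugextensions above: "hg debugextensions -v" adds
# the location, bundled flag, tested-with list and bug link for each enabled
# extension, while "hg debugextensions -T json" emits the same fields through
# the formatter in machine-readable form.
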
@command('debugfileset',
    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
    _('[-r REV] FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    ctx = scmutil.revsingle(repo, opts.get(r'rev'), None)
    if ui.verbose:
        tree = fileset.parse(expr)
        ui.note(fileset.prettyformat(tree), "\n")

    for f in ctx.getfileset(expr):
        ui.write("%s\n" % f)

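# Illustrative usage of debugfileset above: "hg debugfileset 'added() or
# modified()'" lists the matching files in the working copy (or in REV with
# -r REV); with --verbose the parsed fileset tree is printed first.
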
@command('debugfsinfo', [], _('[PATH]'), norepo=True)
def debugfsinfo(ui, path="."):
    """show information detected about current filesystem"""
    ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
    ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
    ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
    ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
    casesensitive = '(unknown)'
    try:
        with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
            casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
    except OSError:
        pass
    ui.write(('case-sensitive: %s\n') % casesensitive)

@command('debuggetbundle',
    [('H', 'head', [], _('id of head node'), _('ID')),
    ('C', 'common', [], _('id of common node'), _('ID')),
    ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
    _('REPO FILE [-H|-C ID]...'),
    norepo=True)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('getbundle'):
        raise error.Abort("getbundle() not supported by target repository")
    args = {}
    if common:
        args[r'common'] = [bin(s) for s in common]
    if head:
        args[r'heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args[r'bundlecaps'] = None
    bundle = repo.getbundle('debug', **args)

    bundletype = opts.get('type', 'bzip2').lower()
    btypes = {'none': 'HG10UN',
              'bzip2': 'HG10BZ',
              'gzip': 'HG10GZ',
              'bundle2': 'HG20'}
    bundletype = btypes.get(bundletype)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_('unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)

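# Illustrative usage of debuggetbundle above (the URL and node IDs are
# placeholders; IDs must be full 40-character hex nodes):
#   hg debuggetbundle http://example.com/repo out.hg -H HEADNODE -C COMMONNODE -t bundle2
# which asks the remote for a bundle connecting the common nodes to the
# requested heads and writes it to out.hg.
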
853 @command('debugignore', [], '[FILE]')
853 @command('debugignore', [], '[FILE]')
854 def debugignore(ui, repo, *files, **opts):
854 def debugignore(ui, repo, *files, **opts):
855 """display the combined ignore pattern and information about ignored files
855 """display the combined ignore pattern and information about ignored files
856
856
857 With no argument display the combined ignore pattern.
857 With no argument display the combined ignore pattern.
858
858
859 Given space separated file names, shows if the given file is ignored and
859 Given space separated file names, shows if the given file is ignored and
860 if so, show the ignore rule (file and line number) that matched it.
860 if so, show the ignore rule (file and line number) that matched it.
861 """
861 """
862 ignore = repo.dirstate._ignore
862 ignore = repo.dirstate._ignore
863 if not files:
863 if not files:
864 # Show all the patterns
864 # Show all the patterns
865 ui.write("%s\n" % repr(ignore))
865 ui.write("%s\n" % repr(ignore))
866 else:
866 else:
867 for f in files:
867 for f in files:
868 nf = util.normpath(f)
868 nf = util.normpath(f)
869 ignored = None
869 ignored = None
870 ignoredata = None
870 ignoredata = None
871 if nf != '.':
871 if nf != '.':
872 if ignore(nf):
872 if ignore(nf):
873 ignored = nf
873 ignored = nf
874 ignoredata = repo.dirstate._ignorefileandline(nf)
874 ignoredata = repo.dirstate._ignorefileandline(nf)
875 else:
875 else:
876 for p in util.finddirs(nf):
876 for p in util.finddirs(nf):
877 if ignore(p):
877 if ignore(p):
878 ignored = p
878 ignored = p
879 ignoredata = repo.dirstate._ignorefileandline(p)
879 ignoredata = repo.dirstate._ignorefileandline(p)
880 break
880 break
881 if ignored:
881 if ignored:
882 if ignored == nf:
882 if ignored == nf:
883 ui.write(_("%s is ignored\n") % f)
883 ui.write(_("%s is ignored\n") % f)
884 else:
884 else:
885 ui.write(_("%s is ignored because of "
885 ui.write(_("%s is ignored because of "
886 "containing folder %s\n")
886 "containing folder %s\n")
887 % (f, ignored))
887 % (f, ignored))
888 ignorefile, lineno, line = ignoredata
888 ignorefile, lineno, line = ignoredata
889 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
889 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
890 % (ignorefile, lineno, line))
890 % (ignorefile, lineno, line))
891 else:
891 else:
892 ui.write(_("%s is not ignored\n") % f)
892 ui.write(_("%s is not ignored\n") % f)
893
893
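# Editor's note: a sketch of typical debugignore output, inferred from the
# ui.write() calls above; the file names and the .hgignore rule shown are
# placeholders.
#
#   $ hg debugignore build/output.o README
#   build/output.o is ignored
#   (ignore rule in .hgignore, line 3: 'build/')
#   README is not ignored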
894 @command('debugindex', cmdutil.debugrevlogopts +
894 @command('debugindex', cmdutil.debugrevlogopts +
895 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
895 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
896 _('[-f FORMAT] -c|-m|FILE'),
896 _('[-f FORMAT] -c|-m|FILE'),
897 optionalrepo=True)
897 optionalrepo=True)
898 def debugindex(ui, repo, file_=None, **opts):
898 def debugindex(ui, repo, file_=None, **opts):
899 """dump the contents of an index file"""
899 """dump the contents of an index file"""
900 opts = pycompat.byteskwargs(opts)
900 opts = pycompat.byteskwargs(opts)
901 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
901 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
902 format = opts.get('format', 0)
902 format = opts.get('format', 0)
903 if format not in (0, 1):
903 if format not in (0, 1):
904 raise error.Abort(_("unknown format %d") % format)
904 raise error.Abort(_("unknown format %d") % format)
905
905
906 generaldelta = r.version & revlog.FLAG_GENERALDELTA
906 generaldelta = r.version & revlog.FLAG_GENERALDELTA
907 if generaldelta:
907 if generaldelta:
908 basehdr = ' delta'
908 basehdr = ' delta'
909 else:
909 else:
910 basehdr = ' base'
910 basehdr = ' base'
911
911
912 if ui.debugflag:
912 if ui.debugflag:
913 shortfn = hex
913 shortfn = hex
914 else:
914 else:
915 shortfn = short
915 shortfn = short
916
916
917 # There might not be anything in r, so have a sane default
917 # There might not be anything in r, so have a sane default
918 idlen = 12
918 idlen = 12
919 for i in r:
919 for i in r:
920 idlen = len(shortfn(r.node(i)))
920 idlen = len(shortfn(r.node(i)))
921 break
921 break
922
922
923 if format == 0:
923 if format == 0:
924 ui.write((" rev offset length " + basehdr + " linkrev"
924 ui.write((" rev offset length " + basehdr + " linkrev"
925 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
925 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
926 elif format == 1:
926 elif format == 1:
927 ui.write((" rev flag offset length"
927 ui.write((" rev flag offset length"
928 " size " + basehdr + " link p1 p2"
928 " size " + basehdr + " link p1 p2"
929 " %s\n") % "nodeid".rjust(idlen))
929 " %s\n") % "nodeid".rjust(idlen))
930
930
931 for i in r:
931 for i in r:
932 node = r.node(i)
932 node = r.node(i)
933 if generaldelta:
933 if generaldelta:
934 base = r.deltaparent(i)
934 base = r.deltaparent(i)
935 else:
935 else:
936 base = r.chainbase(i)
936 base = r.chainbase(i)
937 if format == 0:
937 if format == 0:
938 try:
938 try:
939 pp = r.parents(node)
939 pp = r.parents(node)
940 except Exception:
940 except Exception:
941 pp = [nullid, nullid]
941 pp = [nullid, nullid]
942 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
942 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
943 i, r.start(i), r.length(i), base, r.linkrev(i),
943 i, r.start(i), r.length(i), base, r.linkrev(i),
944 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
944 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
945 elif format == 1:
945 elif format == 1:
946 pr = r.parentrevs(i)
946 pr = r.parentrevs(i)
947 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
947 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
948 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
948 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
949 base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
949 base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
950
950
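# Editor's note: illustrative invocations only. The column layout follows the
# format-0 and format-1 headers written above; -c and -m select the changelog
# or the manifest (per the shared debugrevlogopts), and -f picks the format.
#
#   $ hg debugindex -c          # default (format 0) columns
#   $ hg debugindex -f 1 -m     # format 1 also shows flags and raw sizes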
951 @command('debugindexdot', cmdutil.debugrevlogopts,
951 @command('debugindexdot', cmdutil.debugrevlogopts,
952 _('-c|-m|FILE'), optionalrepo=True)
952 _('-c|-m|FILE'), optionalrepo=True)
953 def debugindexdot(ui, repo, file_=None, **opts):
953 def debugindexdot(ui, repo, file_=None, **opts):
954 """dump an index DAG as a graphviz dot file"""
954 """dump an index DAG as a graphviz dot file"""
955 opts = pycompat.byteskwargs(opts)
955 opts = pycompat.byteskwargs(opts)
956 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
956 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
957 ui.write(("digraph G {\n"))
957 ui.write(("digraph G {\n"))
958 for i in r:
958 for i in r:
959 node = r.node(i)
959 node = r.node(i)
960 pp = r.parents(node)
960 pp = r.parents(node)
961 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
961 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
962 if pp[1] != nullid:
962 if pp[1] != nullid:
963 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
963 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
964 ui.write("}\n")
964 ui.write("}\n")
965
965
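# Editor's note: a hedged usage sketch. debugindexdot emits Graphviz "dot"
# source on stdout, so it can be piped to the dot tool (if installed) to
# render the revision DAG; the file names below are placeholders.
#
#   $ hg debugindexdot -c > dag.dot
#   $ dot -Tpng dag.dot -o dag.png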
966 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
966 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
967 def debuginstall(ui, **opts):
967 def debuginstall(ui, **opts):
968 '''test Mercurial installation
968 '''test Mercurial installation
969
969
970 Returns 0 on success.
970 Returns 0 on success.
971 '''
971 '''
972 opts = pycompat.byteskwargs(opts)
972 opts = pycompat.byteskwargs(opts)
973
973
974 def writetemp(contents):
974 def writetemp(contents):
975 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
975 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
976 f = os.fdopen(fd, pycompat.sysstr("wb"))
976 f = os.fdopen(fd, pycompat.sysstr("wb"))
977 f.write(contents)
977 f.write(contents)
978 f.close()
978 f.close()
979 return name
979 return name
980
980
981 problems = 0
981 problems = 0
982
982
983 fm = ui.formatter('debuginstall', opts)
983 fm = ui.formatter('debuginstall', opts)
984 fm.startitem()
984 fm.startitem()
985
985
986 # encoding
986 # encoding
987 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
987 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
988 err = None
988 err = None
989 try:
989 try:
990 encoding.fromlocal("test")
990 encoding.fromlocal("test")
991 except error.Abort as inst:
991 except error.Abort as inst:
992 err = inst
992 err = inst
993 problems += 1
993 problems += 1
994 fm.condwrite(err, 'encodingerror', _(" %s\n"
994 fm.condwrite(err, 'encodingerror', _(" %s\n"
995 " (check that your locale is properly set)\n"), err)
995 " (check that your locale is properly set)\n"), err)
996
996
997 # Python
997 # Python
998 fm.write('pythonexe', _("checking Python executable (%s)\n"),
998 fm.write('pythonexe', _("checking Python executable (%s)\n"),
999 pycompat.sysexecutable)
999 pycompat.sysexecutable)
1000 fm.write('pythonver', _("checking Python version (%s)\n"),
1000 fm.write('pythonver', _("checking Python version (%s)\n"),
1001 ("%d.%d.%d" % sys.version_info[:3]))
1001 ("%d.%d.%d" % sys.version_info[:3]))
1002 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1002 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1003 os.path.dirname(pycompat.fsencode(os.__file__)))
1003 os.path.dirname(pycompat.fsencode(os.__file__)))
1004
1004
1005 security = set(sslutil.supportedprotocols)
1005 security = set(sslutil.supportedprotocols)
1006 if sslutil.hassni:
1006 if sslutil.hassni:
1007 security.add('sni')
1007 security.add('sni')
1008
1008
1009 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1009 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1010 fm.formatlist(sorted(security), name='protocol',
1010 fm.formatlist(sorted(security), name='protocol',
1011 fmt='%s', sep=','))
1011 fmt='%s', sep=','))
1012
1012
1013 # These are warnings, not errors. So don't increment problem count. This
1013 # These are warnings, not errors. So don't increment problem count. This
1014 # may change in the future.
1014 # may change in the future.
1015 if 'tls1.2' not in security:
1015 if 'tls1.2' not in security:
1016 fm.plain(_(' TLS 1.2 not supported by Python install; '
1016 fm.plain(_(' TLS 1.2 not supported by Python install; '
1017 'network connections lack modern security\n'))
1017 'network connections lack modern security\n'))
1018 if 'sni' not in security:
1018 if 'sni' not in security:
1019 fm.plain(_(' SNI not supported by Python install; may have '
1019 fm.plain(_(' SNI not supported by Python install; may have '
1020 'connectivity issues with some servers\n'))
1020 'connectivity issues with some servers\n'))
1021
1021
1022 # TODO print CA cert info
1022 # TODO print CA cert info
1023
1023
1024 # hg version
1024 # hg version
1025 hgver = util.version()
1025 hgver = util.version()
1026 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1026 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1027 hgver.split('+')[0])
1027 hgver.split('+')[0])
1028 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1028 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1029 '+'.join(hgver.split('+')[1:]))
1029 '+'.join(hgver.split('+')[1:]))
1030
1030
1031 # compiled modules
1031 # compiled modules
1032 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1032 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1033 policy.policy)
1033 policy.policy)
1034 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1034 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1035 os.path.dirname(pycompat.fsencode(__file__)))
1035 os.path.dirname(pycompat.fsencode(__file__)))
1036
1036
1037 if policy.policy in ('c', 'allow'):
1037 if policy.policy in ('c', 'allow'):
1038 err = None
1038 err = None
1039 try:
1039 try:
1040 from .cext import (
1040 from .cext import (
1041 base85,
1041 base85,
1042 bdiff,
1042 bdiff,
1043 mpatch,
1043 mpatch,
1044 osutil,
1044 osutil,
1045 )
1045 )
1046 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1046 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1047 except Exception as inst:
1047 except Exception as inst:
1048 err = inst
1048 err = inst
1049 problems += 1
1049 problems += 1
1050 fm.condwrite(err, 'extensionserror', " %s\n", err)
1050 fm.condwrite(err, 'extensionserror', " %s\n", err)
1051
1051
1052 compengines = util.compengines._engines.values()
1052 compengines = util.compengines._engines.values()
1053 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1053 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1054 fm.formatlist(sorted(e.name() for e in compengines),
1054 fm.formatlist(sorted(e.name() for e in compengines),
1055 name='compengine', fmt='%s', sep=', '))
1055 name='compengine', fmt='%s', sep=', '))
1056 fm.write('compenginesavail', _('checking available compression engines '
1056 fm.write('compenginesavail', _('checking available compression engines '
1057 '(%s)\n'),
1057 '(%s)\n'),
1058 fm.formatlist(sorted(e.name() for e in compengines
1058 fm.formatlist(sorted(e.name() for e in compengines
1059 if e.available()),
1059 if e.available()),
1060 name='compengine', fmt='%s', sep=', '))
1060 name='compengine', fmt='%s', sep=', '))
1061 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1061 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1062 fm.write('compenginesserver', _('checking available compression engines '
1062 fm.write('compenginesserver', _('checking available compression engines '
1063 'for wire protocol (%s)\n'),
1063 'for wire protocol (%s)\n'),
1064 fm.formatlist([e.name() for e in wirecompengines
1064 fm.formatlist([e.name() for e in wirecompengines
1065 if e.wireprotosupport()],
1065 if e.wireprotosupport()],
1066 name='compengine', fmt='%s', sep=', '))
1066 name='compengine', fmt='%s', sep=', '))
1067
1067
1068 # templates
1068 # templates
1069 p = templater.templatepaths()
1069 p = templater.templatepaths()
1070 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1070 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1071 fm.condwrite(not p, '', _(" no template directories found\n"))
1071 fm.condwrite(not p, '', _(" no template directories found\n"))
1072 if p:
1072 if p:
1073 m = templater.templatepath("map-cmdline.default")
1073 m = templater.templatepath("map-cmdline.default")
1074 if m:
1074 if m:
1075 # template found, check if it is working
1075 # template found, check if it is working
1076 err = None
1076 err = None
1077 try:
1077 try:
1078 templater.templater.frommapfile(m)
1078 templater.templater.frommapfile(m)
1079 except Exception as inst:
1079 except Exception as inst:
1080 err = inst
1080 err = inst
1081 p = None
1081 p = None
1082 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1082 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1083 else:
1083 else:
1084 p = None
1084 p = None
1085 fm.condwrite(p, 'defaulttemplate',
1085 fm.condwrite(p, 'defaulttemplate',
1086 _("checking default template (%s)\n"), m)
1086 _("checking default template (%s)\n"), m)
1087 fm.condwrite(not m, 'defaulttemplatenotfound',
1087 fm.condwrite(not m, 'defaulttemplatenotfound',
1088 _(" template '%s' not found\n"), "default")
1088 _(" template '%s' not found\n"), "default")
1089 if not p:
1089 if not p:
1090 problems += 1
1090 problems += 1
1091 fm.condwrite(not p, '',
1091 fm.condwrite(not p, '',
1092 _(" (templates seem to have been installed incorrectly)\n"))
1092 _(" (templates seem to have been installed incorrectly)\n"))
1093
1093
1094 # editor
1094 # editor
1095 editor = ui.geteditor()
1095 editor = ui.geteditor()
1096 editor = util.expandpath(editor)
1096 editor = util.expandpath(editor)
1097 fm.write('editor', _("checking commit editor... (%s)\n"), editor)
1097 fm.write('editor', _("checking commit editor... (%s)\n"), editor)
1098 cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
1098 cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
1099 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1099 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1100 _(" No commit editor set and can't find %s in PATH\n"
1100 _(" No commit editor set and can't find %s in PATH\n"
1101 " (specify a commit editor in your configuration"
1101 " (specify a commit editor in your configuration"
1102 " file)\n"), not cmdpath and editor == 'vi' and editor)
1102 " file)\n"), not cmdpath and editor == 'vi' and editor)
1103 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1103 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1104 _(" Can't find editor '%s' in PATH\n"
1104 _(" Can't find editor '%s' in PATH\n"
1105 " (specify a commit editor in your configuration"
1105 " (specify a commit editor in your configuration"
1106 " file)\n"), not cmdpath and editor)
1106 " file)\n"), not cmdpath and editor)
1107 if not cmdpath and editor != 'vi':
1107 if not cmdpath and editor != 'vi':
1108 problems += 1
1108 problems += 1
1109
1109
1110 # check username
1110 # check username
1111 username = None
1111 username = None
1112 err = None
1112 err = None
1113 try:
1113 try:
1114 username = ui.username()
1114 username = ui.username()
1115 except error.Abort as e:
1115 except error.Abort as e:
1116 err = e
1116 err = e
1117 problems += 1
1117 problems += 1
1118
1118
1119 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1119 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1120 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1120 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1121 " (specify a username in your configuration file)\n"), err)
1121 " (specify a username in your configuration file)\n"), err)
1122
1122
1123 fm.condwrite(not problems, '',
1123 fm.condwrite(not problems, '',
1124 _("no problems detected\n"))
1124 _("no problems detected\n"))
1125 if not problems:
1125 if not problems:
1126 fm.data(problems=problems)
1126 fm.data(problems=problems)
1127 fm.condwrite(problems, 'problems',
1127 fm.condwrite(problems, 'problems',
1128 _("%d problems detected,"
1128 _("%d problems detected,"
1129 " please check your install!\n"), problems)
1129 " please check your install!\n"), problems)
1130 fm.end()
1130 fm.end()
1131
1131
1132 return problems
1132 return problems
1133
1133
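# Editor's note: debuginstall goes through the formatter, so besides the
# default human-readable checklist it should also accept the standard
# -T/--template option from formatteropts. Illustrative only:
#
#   $ hg debuginstall            # plain checklist, exit code = problem count
#   $ hg debuginstall -Tjson     # machine-readable results (assumed template)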
1134 @command('debugknown', [], _('REPO ID...'), norepo=True)
1134 @command('debugknown', [], _('REPO ID...'), norepo=True)
1135 def debugknown(ui, repopath, *ids, **opts):
1135 def debugknown(ui, repopath, *ids, **opts):
1136 """test whether node ids are known to a repo
1136 """test whether node ids are known to a repo
1137
1137
1138 Every ID must be a full-length hex node id string. Returns a list of 0s
1138 Every ID must be a full-length hex node id string. Returns a list of 0s
1139 and 1s indicating unknown/known.
1139 and 1s indicating unknown/known.
1140 """
1140 """
1141 opts = pycompat.byteskwargs(opts)
1141 opts = pycompat.byteskwargs(opts)
1142 repo = hg.peer(ui, opts, repopath)
1142 repo = hg.peer(ui, opts, repopath)
1143 if not repo.capable('known'):
1143 if not repo.capable('known'):
1144 raise error.Abort("known() not supported by target repository")
1144 raise error.Abort("known() not supported by target repository")
1145 flags = repo.known([bin(s) for s in ids])
1145 flags = repo.known([bin(s) for s in ids])
1146 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1146 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1147
1147
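# Editor's note: an illustrative example of the output format produced above
# (one '0' or '1' per queried id, in order); the URL and node ids are
# placeholders.
#
#   $ hg debugknown https://example.com/repo \
#         1111111111111111111111111111111111111111 \
#         0123456789abcdef0123456789abcdef01234567
#   01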
1148 @command('debuglabelcomplete', [], _('LABEL...'))
1148 @command('debuglabelcomplete', [], _('LABEL...'))
1149 def debuglabelcomplete(ui, repo, *args):
1149 def debuglabelcomplete(ui, repo, *args):
1150 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1150 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1151 debugnamecomplete(ui, repo, *args)
1151 debugnamecomplete(ui, repo, *args)
1152
1152
1153 @command('debuglocks',
1153 @command('debuglocks',
1154 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1154 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1155 ('W', 'force-wlock', None,
1155 ('W', 'force-wlock', None,
1156 _('free the working state lock (DANGEROUS)'))],
1156 _('free the working state lock (DANGEROUS)'))],
1157 _('[OPTION]...'))
1157 _('[OPTION]...'))
1158 def debuglocks(ui, repo, **opts):
1158 def debuglocks(ui, repo, **opts):
1159 """show or modify state of locks
1159 """show or modify state of locks
1160
1160
1161 By default, this command will show which locks are held. This
1161 By default, this command will show which locks are held. This
1162 includes the user and process holding the lock, the amount of time
1162 includes the user and process holding the lock, the amount of time
1163 the lock has been held, and the machine name where the process is
1163 the lock has been held, and the machine name where the process is
1164 running if it's not local.
1164 running if it's not local.
1165
1165
1166 Locks protect the integrity of Mercurial's data, so should be
1166 Locks protect the integrity of Mercurial's data, so should be
1167 treated with care. System crashes or other interruptions may cause
1167 treated with care. System crashes or other interruptions may cause
1168 locks to not be properly released, though Mercurial will usually
1168 locks to not be properly released, though Mercurial will usually
1169 detect and remove such stale locks automatically.
1169 detect and remove such stale locks automatically.
1170
1170
1171 However, detecting stale locks may not always be possible (for
1171 However, detecting stale locks may not always be possible (for
1172 instance, on a shared filesystem). Removing locks may also be
1172 instance, on a shared filesystem). Removing locks may also be
1173 blocked by filesystem permissions.
1173 blocked by filesystem permissions.
1174
1174
1175 Returns 0 if no locks are held.
1175 Returns 0 if no locks are held.
1176
1176
1177 """
1177 """
1178
1178
1179 if opts.get(r'force_lock'):
1179 if opts.get(r'force_lock'):
1180 repo.svfs.unlink('lock')
1180 repo.svfs.unlink('lock')
1181 if opts.get(r'force_wlock'):
1181 if opts.get(r'force_wlock'):
1182 repo.vfs.unlink('wlock')
1182 repo.vfs.unlink('wlock')
1183 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1184 return 0
1184 return 0
1185
1185
1186 now = time.time()
1186 now = time.time()
1187 held = 0
1187 held = 0
1188
1188
1189 def report(vfs, name, method):
1189 def report(vfs, name, method):
1190 # this causes stale locks to get reaped for more accurate reporting
1190 # this causes stale locks to get reaped for more accurate reporting
1191 try:
1191 try:
1192 l = method(False)
1192 l = method(False)
1193 except error.LockHeld:
1193 except error.LockHeld:
1194 l = None
1194 l = None
1195
1195
1196 if l:
1196 if l:
1197 l.release()
1197 l.release()
1198 else:
1198 else:
1199 try:
1199 try:
1200 stat = vfs.lstat(name)
1200 stat = vfs.lstat(name)
1201 age = now - stat.st_mtime
1201 age = now - stat.st_mtime
1202 user = util.username(stat.st_uid)
1202 user = util.username(stat.st_uid)
1203 locker = vfs.readlock(name)
1203 locker = vfs.readlock(name)
1204 if ":" in locker:
1204 if ":" in locker:
1205 host, pid = locker.split(':')
1205 host, pid = locker.split(':')
1206 if host == socket.gethostname():
1206 if host == socket.gethostname():
1207 locker = 'user %s, process %s' % (user, pid)
1207 locker = 'user %s, process %s' % (user, pid)
1208 else:
1208 else:
1209 locker = ('user %s, process %s, host %s'
1210 % (user, pid, host))
1211 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1211 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1212 return 1
1212 return 1
1213 except OSError as e:
1213 except OSError as e:
1214 if e.errno != errno.ENOENT:
1214 if e.errno != errno.ENOENT:
1215 raise
1215 raise
1216
1216
1217 ui.write(("%-6s free\n") % (name + ":"))
1217 ui.write(("%-6s free\n") % (name + ":"))
1218 return 0
1218 return 0
1219
1219
1220 held += report(repo.svfs, "lock", repo.lock)
1220 held += report(repo.svfs, "lock", repo.lock)
1221 held += report(repo.vfs, "wlock", repo.wlock)
1221 held += report(repo.vfs, "wlock", repo.wlock)
1222
1222
1223 return held
1223 return held
1224
1224
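# Editor's note: a sketch of the report() output above, with made-up user,
# pid, host and age values.
#
#   $ hg debuglocks
#   lock:  user alice, process 2817, host buildbox (12s)
#   wlock: free
#
# The return value is the number of locks still held, so 0 means "no locks".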
1225 @command('debugmergestate', [], '')
1225 @command('debugmergestate', [], '')
1226 def debugmergestate(ui, repo, *args):
1226 def debugmergestate(ui, repo, *args):
1227 """print merge state
1227 """print merge state
1228
1228
1229 Use --verbose to print out information about whether v1 or v2 merge state
1229 Use --verbose to print out information about whether v1 or v2 merge state
1230 was chosen."""
1230 was chosen."""
1231 def _hashornull(h):
1231 def _hashornull(h):
1232 if h == nullhex:
1232 if h == nullhex:
1233 return 'null'
1233 return 'null'
1234 else:
1234 else:
1235 return h
1235 return h
1236
1236
1237 def printrecords(version):
1237 def printrecords(version):
1238 ui.write(('* version %s records\n') % version)
1238 ui.write(('* version %s records\n') % version)
1239 if version == 1:
1239 if version == 1:
1240 records = v1records
1240 records = v1records
1241 else:
1241 else:
1242 records = v2records
1242 records = v2records
1243
1243
1244 for rtype, record in records:
1244 for rtype, record in records:
1245 # pretty print some record types
1245 # pretty print some record types
1246 if rtype == 'L':
1246 if rtype == 'L':
1247 ui.write(('local: %s\n') % record)
1247 ui.write(('local: %s\n') % record)
1248 elif rtype == 'O':
1248 elif rtype == 'O':
1249 ui.write(('other: %s\n') % record)
1249 ui.write(('other: %s\n') % record)
1250 elif rtype == 'm':
1250 elif rtype == 'm':
1251 driver, mdstate = record.split('\0', 1)
1251 driver, mdstate = record.split('\0', 1)
1252 ui.write(('merge driver: %s (state "%s")\n')
1252 ui.write(('merge driver: %s (state "%s")\n')
1253 % (driver, mdstate))
1253 % (driver, mdstate))
1254 elif rtype in 'FDC':
1254 elif rtype in 'FDC':
1255 r = record.split('\0')
1255 r = record.split('\0')
1256 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1256 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1257 if version == 1:
1257 if version == 1:
1258 onode = 'not stored in v1 format'
1258 onode = 'not stored in v1 format'
1259 flags = r[7]
1259 flags = r[7]
1260 else:
1260 else:
1261 onode, flags = r[7:9]
1261 onode, flags = r[7:9]
1262 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1262 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1263 % (f, rtype, state, _hashornull(hash)))
1263 % (f, rtype, state, _hashornull(hash)))
1264 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1264 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1265 ui.write((' ancestor path: %s (node %s)\n')
1265 ui.write((' ancestor path: %s (node %s)\n')
1266 % (afile, _hashornull(anode)))
1266 % (afile, _hashornull(anode)))
1267 ui.write((' other path: %s (node %s)\n')
1267 ui.write((' other path: %s (node %s)\n')
1268 % (ofile, _hashornull(onode)))
1268 % (ofile, _hashornull(onode)))
1269 elif rtype == 'f':
1269 elif rtype == 'f':
1270 filename, rawextras = record.split('\0', 1)
1270 filename, rawextras = record.split('\0', 1)
1271 extras = rawextras.split('\0')
1271 extras = rawextras.split('\0')
1272 i = 0
1272 i = 0
1273 extrastrings = []
1273 extrastrings = []
1274 while i < len(extras):
1274 while i < len(extras):
1275 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1275 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1276 i += 2
1276 i += 2
1277
1277
1278 ui.write(('file extras: %s (%s)\n')
1278 ui.write(('file extras: %s (%s)\n')
1279 % (filename, ', '.join(extrastrings)))
1279 % (filename, ', '.join(extrastrings)))
1280 elif rtype == 'l':
1280 elif rtype == 'l':
1281 labels = record.split('\0', 2)
1281 labels = record.split('\0', 2)
1282 labels = [l for l in labels if len(l) > 0]
1282 labels = [l for l in labels if len(l) > 0]
1283 ui.write(('labels:\n'))
1283 ui.write(('labels:\n'))
1284 ui.write((' local: %s\n' % labels[0]))
1284 ui.write((' local: %s\n' % labels[0]))
1285 ui.write((' other: %s\n' % labels[1]))
1285 ui.write((' other: %s\n' % labels[1]))
1286 if len(labels) > 2:
1286 if len(labels) > 2:
1287 ui.write((' base: %s\n' % labels[2]))
1287 ui.write((' base: %s\n' % labels[2]))
1288 else:
1288 else:
1289 ui.write(('unrecognized entry: %s\t%s\n')
1289 ui.write(('unrecognized entry: %s\t%s\n')
1290 % (rtype, record.replace('\0', '\t')))
1290 % (rtype, record.replace('\0', '\t')))
1291
1291
1292 # Avoid mergestate.read() since it may raise an exception for unsupported
1292 # Avoid mergestate.read() since it may raise an exception for unsupported
1293 # merge state records. We shouldn't be doing this, but this is OK since this
1293 # merge state records. We shouldn't be doing this, but this is OK since this
1294 # command is pretty low-level.
1294 # command is pretty low-level.
1295 ms = mergemod.mergestate(repo)
1295 ms = mergemod.mergestate(repo)
1296
1296
1297 # sort so that reasonable information is on top
1297 # sort so that reasonable information is on top
1298 v1records = ms._readrecordsv1()
1298 v1records = ms._readrecordsv1()
1299 v2records = ms._readrecordsv2()
1299 v2records = ms._readrecordsv2()
1300 order = 'LOml'
1300 order = 'LOml'
1301 def key(r):
1301 def key(r):
1302 idx = order.find(r[0])
1302 idx = order.find(r[0])
1303 if idx == -1:
1303 if idx == -1:
1304 return (1, r[1])
1304 return (1, r[1])
1305 else:
1305 else:
1306 return (0, idx)
1306 return (0, idx)
1307 v1records.sort(key=key)
1307 v1records.sort(key=key)
1308 v2records.sort(key=key)
1308 v2records.sort(key=key)
1309
1309
1310 if not v1records and not v2records:
1310 if not v1records and not v2records:
1311 ui.write(('no merge state found\n'))
1311 ui.write(('no merge state found\n'))
1312 elif not v2records:
1312 elif not v2records:
1313 ui.note(('no version 2 merge state\n'))
1313 ui.note(('no version 2 merge state\n'))
1314 printrecords(1)
1314 printrecords(1)
1315 elif ms._v1v2match(v1records, v2records):
1315 elif ms._v1v2match(v1records, v2records):
1316 ui.note(('v1 and v2 states match: using v2\n'))
1316 ui.note(('v1 and v2 states match: using v2\n'))
1317 printrecords(2)
1317 printrecords(2)
1318 else:
1318 else:
1319 ui.note(('v1 and v2 states mismatch: using v1\n'))
1319 ui.note(('v1 and v2 states mismatch: using v1\n'))
1320 printrecords(1)
1320 printrecords(1)
1321 if ui.verbose:
1321 if ui.verbose:
1322 printrecords(2)
1322 printrecords(2)
1323
1323
1324 @command('debugnamecomplete', [], _('NAME...'))
1324 @command('debugnamecomplete', [], _('NAME...'))
1325 def debugnamecomplete(ui, repo, *args):
1325 def debugnamecomplete(ui, repo, *args):
1326 '''complete "names" - tags, open branch names, bookmark names'''
1326 '''complete "names" - tags, open branch names, bookmark names'''
1327
1327
1328 names = set()
1328 names = set()
1329 # since we previously only listed open branches, we will handle that
1329 # since we previously only listed open branches, we will handle that
1330 # specially (after this for loop)
1330 # specially (after this for loop)
1331 for name, ns in repo.names.iteritems():
1331 for name, ns in repo.names.iteritems():
1332 if name != 'branches':
1332 if name != 'branches':
1333 names.update(ns.listnames(repo))
1333 names.update(ns.listnames(repo))
1334 names.update(tag for (tag, heads, tip, closed)
1334 names.update(tag for (tag, heads, tip, closed)
1335 in repo.branchmap().iterbranches() if not closed)
1335 in repo.branchmap().iterbranches() if not closed)
1336 completions = set()
1336 completions = set()
1337 if not args:
1337 if not args:
1338 args = ['']
1338 args = ['']
1339 for a in args:
1339 for a in args:
1340 completions.update(n for n in names if n.startswith(a))
1340 completions.update(n for n in names if n.startswith(a))
1341 ui.write('\n'.join(sorted(completions)))
1341 ui.write('\n'.join(sorted(completions)))
1342 ui.write('\n')
1342 ui.write('\n')
1343
1343
1344 @command('debugobsolete',
1344 @command('debugobsolete',
1345 [('', 'flags', 0, _('markers flag')),
1345 [('', 'flags', 0, _('markers flag')),
1346 ('', 'record-parents', False,
1346 ('', 'record-parents', False,
1347 _('record parent information for the precursor')),
1347 _('record parent information for the precursor')),
1348 ('r', 'rev', [], _('display markers relevant to REV')),
1348 ('r', 'rev', [], _('display markers relevant to REV')),
1349 ('', 'exclusive', False, _('restrict display to markers only '
1349 ('', 'exclusive', False, _('restrict display to markers only '
1350 'relevant to REV')),
1350 'relevant to REV')),
1351 ('', 'index', False, _('display index of the marker')),
1351 ('', 'index', False, _('display index of the marker')),
1352 ('', 'delete', [], _('delete markers specified by indices')),
1352 ('', 'delete', [], _('delete markers specified by indices')),
1353 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1353 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1354 _('[OBSOLETED [REPLACEMENT ...]]'))
1354 _('[OBSOLETED [REPLACEMENT ...]]'))
1355 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1355 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1356 """create arbitrary obsolete marker
1356 """create arbitrary obsolete marker
1357
1357
1358 With no arguments, displays the list of obsolescence markers."""
1358 With no arguments, displays the list of obsolescence markers."""
1359
1359
1360 opts = pycompat.byteskwargs(opts)
1360 opts = pycompat.byteskwargs(opts)
1361
1361
1362 def parsenodeid(s):
1362 def parsenodeid(s):
1363 try:
1363 try:
1364 # We do not use revsingle/revrange functions here to accept
1364 # We do not use revsingle/revrange functions here to accept
1365 # arbitrary node identifiers, possibly not present in the
1365 # arbitrary node identifiers, possibly not present in the
1366 # local repository.
1366 # local repository.
1367 n = bin(s)
1367 n = bin(s)
1368 if len(n) != len(nullid):
1368 if len(n) != len(nullid):
1369 raise TypeError()
1369 raise TypeError()
1370 return n
1370 return n
1371 except TypeError:
1371 except TypeError:
1372 raise error.Abort('changeset references must be full hexadecimal '
1372 raise error.Abort('changeset references must be full hexadecimal '
1373 'node identifiers')
1373 'node identifiers')
1374
1374
1375 if opts.get('delete'):
1375 if opts.get('delete'):
1376 indices = []
1376 indices = []
1377 for v in opts.get('delete'):
1377 for v in opts.get('delete'):
1378 try:
1378 try:
1379 indices.append(int(v))
1379 indices.append(int(v))
1380 except ValueError:
1380 except ValueError:
1381 raise error.Abort(_('invalid index value: %r') % v,
1381 raise error.Abort(_('invalid index value: %r') % v,
1382 hint=_('use integers for indices'))
1382 hint=_('use integers for indices'))
1383
1383
1384 if repo.currenttransaction():
1384 if repo.currenttransaction():
1385 raise error.Abort(_('cannot delete obsmarkers in the middle '
1386 'of a transaction.'))
1387
1387
1388 with repo.lock():
1388 with repo.lock():
1389 n = repair.deleteobsmarkers(repo.obsstore, indices)
1389 n = repair.deleteobsmarkers(repo.obsstore, indices)
1390 ui.write(_('deleted %i obsolescence markers\n') % n)
1390 ui.write(_('deleted %i obsolescence markers\n') % n)
1391
1391
1392 return
1392 return
1393
1393
1394 if precursor is not None:
1394 if precursor is not None:
1395 if opts['rev']:
1395 if opts['rev']:
1396 raise error.Abort('cannot select revision when creating marker')
1396 raise error.Abort('cannot select revision when creating marker')
1397 metadata = {}
1397 metadata = {}
1398 metadata['user'] = opts['user'] or ui.username()
1398 metadata['user'] = opts['user'] or ui.username()
1399 succs = tuple(parsenodeid(succ) for succ in successors)
1399 succs = tuple(parsenodeid(succ) for succ in successors)
1400 l = repo.lock()
1400 l = repo.lock()
1401 try:
1401 try:
1402 tr = repo.transaction('debugobsolete')
1402 tr = repo.transaction('debugobsolete')
1403 try:
1403 try:
1404 date = opts.get('date')
1404 date = opts.get('date')
1405 if date:
1405 if date:
1406 date = util.parsedate(date)
1406 date = util.parsedate(date)
1407 else:
1407 else:
1408 date = None
1408 date = None
1409 prec = parsenodeid(precursor)
1409 prec = parsenodeid(precursor)
1410 parents = None
1410 parents = None
1411 if opts['record_parents']:
1411 if opts['record_parents']:
1412 if prec not in repo.unfiltered():
1412 if prec not in repo.unfiltered():
1413 raise error.Abort('cannot use --record-parents on '
1414 'unknown changesets')
1414 'unknown changesets')
1415 parents = repo.unfiltered()[prec].parents()
1415 parents = repo.unfiltered()[prec].parents()
1416 parents = tuple(p.node() for p in parents)
1416 parents = tuple(p.node() for p in parents)
1417 repo.obsstore.create(tr, prec, succs, opts['flags'],
1417 repo.obsstore.create(tr, prec, succs, opts['flags'],
1418 parents=parents, date=date,
1418 parents=parents, date=date,
1419 metadata=metadata, ui=ui)
1419 metadata=metadata, ui=ui)
1420 tr.close()
1420 tr.close()
1421 except ValueError as exc:
1421 except ValueError as exc:
1422 raise error.Abort(_('bad obsmarker input: %s') % exc)
1422 raise error.Abort(_('bad obsmarker input: %s') % exc)
1423 finally:
1423 finally:
1424 tr.release()
1424 tr.release()
1425 finally:
1425 finally:
1426 l.release()
1426 l.release()
1427 else:
1427 else:
1428 if opts['rev']:
1428 if opts['rev']:
1429 revs = scmutil.revrange(repo, opts['rev'])
1429 revs = scmutil.revrange(repo, opts['rev'])
1430 nodes = [repo[r].node() for r in revs]
1430 nodes = [repo[r].node() for r in revs]
1431 markers = list(obsolete.getmarkers(repo, nodes=nodes,
1431 markers = list(obsolete.getmarkers(repo, nodes=nodes,
1432 exclusive=opts['exclusive']))
1432 exclusive=opts['exclusive']))
1433 markers.sort(key=lambda x: x._data)
1433 markers.sort(key=lambda x: x._data)
1434 else:
1434 else:
1435 markers = obsolete.getmarkers(repo)
1435 markers = obsolete.getmarkers(repo)
1436
1436
1437 markerstoiter = markers
1437 markerstoiter = markers
1438 isrelevant = lambda m: True
1438 isrelevant = lambda m: True
1439 if opts.get('rev') and opts.get('index'):
1439 if opts.get('rev') and opts.get('index'):
1440 markerstoiter = obsolete.getmarkers(repo)
1440 markerstoiter = obsolete.getmarkers(repo)
1441 markerset = set(markers)
1441 markerset = set(markers)
1442 isrelevant = lambda m: m in markerset
1442 isrelevant = lambda m: m in markerset
1443
1443
1444 fm = ui.formatter('debugobsolete', opts)
1444 fm = ui.formatter('debugobsolete', opts)
1445 for i, m in enumerate(markerstoiter):
1445 for i, m in enumerate(markerstoiter):
1446 if not isrelevant(m):
1446 if not isrelevant(m):
1447 # marker can be irrelevant when we're iterating over a set
1447 # marker can be irrelevant when we're iterating over a set
1448 # of markers (markerstoiter) which is bigger than the set
1448 # of markers (markerstoiter) which is bigger than the set
1449 # of markers we want to display (markers)
1449 # of markers we want to display (markers)
1450 # this can happen if both --index and --rev options are
1450 # this can happen if both --index and --rev options are
1451 # provided and thus we need to iterate over all of the markers
1451 # provided and thus we need to iterate over all of the markers
1452 # to get the correct indices, but only display the ones that
1452 # to get the correct indices, but only display the ones that
1453 # are relevant to --rev value
1453 # are relevant to --rev value
1454 continue
1454 continue
1455 fm.startitem()
1455 fm.startitem()
1456 ind = i if opts.get('index') else None
1456 ind = i if opts.get('index') else None
1457 cmdutil.showmarker(fm, m, index=ind)
1457 cmdutil.showmarker(fm, m, index=ind)
1458 fm.end()
1458 fm.end()
1459
1459
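# Editor's note: hedged examples using only the options registered above;
# the node ids are placeholders, and -d is assumed to come from the shared
# commitopts2.
#
#   $ hg debugobsolete                          # list all markers
#   $ hg debugobsolete --rev . --index          # markers relevant to '.'
#   $ hg debugobsolete -d '0 0' \
#         0123456789abcdef0123456789abcdef01234567 \
#         89abcdef0123456789abcdef0123456789abcdef
#
# The last form records that the first changeset was superseded by the second.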
1460 @command('debugpathcomplete',
1460 @command('debugpathcomplete',
1461 [('f', 'full', None, _('complete an entire path')),
1461 [('f', 'full', None, _('complete an entire path')),
1462 ('n', 'normal', None, _('show only normal files')),
1462 ('n', 'normal', None, _('show only normal files')),
1463 ('a', 'added', None, _('show only added files')),
1463 ('a', 'added', None, _('show only added files')),
1464 ('r', 'removed', None, _('show only removed files'))],
1464 ('r', 'removed', None, _('show only removed files'))],
1465 _('FILESPEC...'))
1465 _('FILESPEC...'))
1466 def debugpathcomplete(ui, repo, *specs, **opts):
1466 def debugpathcomplete(ui, repo, *specs, **opts):
1467 '''complete part or all of a tracked path
1467 '''complete part or all of a tracked path
1468
1468
1469 This command supports shells that offer path name completion. It
1469 This command supports shells that offer path name completion. It
1470 currently completes only files already known to the dirstate.
1470 currently completes only files already known to the dirstate.
1471
1471
1472 Completion extends only to the next path segment unless
1472 Completion extends only to the next path segment unless
1473 --full is specified, in which case entire paths are used.'''
1473 --full is specified, in which case entire paths are used.'''
1474
1474
1475 def complete(path, acceptable):
1475 def complete(path, acceptable):
1476 dirstate = repo.dirstate
1476 dirstate = repo.dirstate
1477 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1477 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1478 rootdir = repo.root + pycompat.ossep
1478 rootdir = repo.root + pycompat.ossep
1479 if spec != repo.root and not spec.startswith(rootdir):
1479 if spec != repo.root and not spec.startswith(rootdir):
1480 return [], []
1480 return [], []
1481 if os.path.isdir(spec):
1481 if os.path.isdir(spec):
1482 spec += '/'
1482 spec += '/'
1483 spec = spec[len(rootdir):]
1483 spec = spec[len(rootdir):]
1484 fixpaths = pycompat.ossep != '/'
1484 fixpaths = pycompat.ossep != '/'
1485 if fixpaths:
1485 if fixpaths:
1486 spec = spec.replace(pycompat.ossep, '/')
1486 spec = spec.replace(pycompat.ossep, '/')
1487 speclen = len(spec)
1487 speclen = len(spec)
1488 fullpaths = opts[r'full']
1488 fullpaths = opts[r'full']
1489 files, dirs = set(), set()
1489 files, dirs = set(), set()
1490 adddir, addfile = dirs.add, files.add
1490 adddir, addfile = dirs.add, files.add
1491 for f, st in dirstate.iteritems():
1491 for f, st in dirstate.iteritems():
1492 if f.startswith(spec) and st[0] in acceptable:
1492 if f.startswith(spec) and st[0] in acceptable:
1493 if fixpaths:
1493 if fixpaths:
1494 f = f.replace('/', pycompat.ossep)
1494 f = f.replace('/', pycompat.ossep)
1495 if fullpaths:
1495 if fullpaths:
1496 addfile(f)
1496 addfile(f)
1497 continue
1497 continue
1498 s = f.find(pycompat.ossep, speclen)
1498 s = f.find(pycompat.ossep, speclen)
1499 if s >= 0:
1499 if s >= 0:
1500 adddir(f[:s])
1500 adddir(f[:s])
1501 else:
1501 else:
1502 addfile(f)
1502 addfile(f)
1503 return files, dirs
1503 return files, dirs
1504
1504
1505 acceptable = ''
1505 acceptable = ''
1506 if opts[r'normal']:
1506 if opts[r'normal']:
1507 acceptable += 'nm'
1507 acceptable += 'nm'
1508 if opts[r'added']:
1508 if opts[r'added']:
1509 acceptable += 'a'
1509 acceptable += 'a'
1510 if opts[r'removed']:
1510 if opts[r'removed']:
1511 acceptable += 'r'
1511 acceptable += 'r'
1512 cwd = repo.getcwd()
1512 cwd = repo.getcwd()
1513 if not specs:
1513 if not specs:
1514 specs = ['.']
1514 specs = ['.']
1515
1515
1516 files, dirs = set(), set()
1516 files, dirs = set(), set()
1517 for spec in specs:
1517 for spec in specs:
1518 f, d = complete(spec, acceptable or 'nmar')
1518 f, d = complete(spec, acceptable or 'nmar')
1519 files.update(f)
1519 files.update(f)
1520 dirs.update(d)
1520 dirs.update(d)
1521 files.update(dirs)
1521 files.update(dirs)
1522 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1522 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1523 ui.write('\n')
1523 ui.write('\n')
1524
1524
1525 @command('debugpickmergetool',
1525 @command('debugpickmergetool',
1526 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1526 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1527 ('', 'changedelete', None, _('emulate merging change and delete')),
1527 ('', 'changedelete', None, _('emulate merging change and delete')),
1528 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1528 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1529 _('[PATTERN]...'),
1529 _('[PATTERN]...'),
1530 inferrepo=True)
1530 inferrepo=True)
1531 def debugpickmergetool(ui, repo, *pats, **opts):
1531 def debugpickmergetool(ui, repo, *pats, **opts):
1532 """examine which merge tool is chosen for specified file
1532 """examine which merge tool is chosen for specified file
1533
1533
1534 As described in :hg:`help merge-tools`, Mercurial examines the
1535 configurations below in this order to decide which merge tool is
1536 chosen for the specified file.
1537
1537
1538 1. ``--tool`` option
1538 1. ``--tool`` option
1539 2. ``HGMERGE`` environment variable
1539 2. ``HGMERGE`` environment variable
1540 3. configurations in ``merge-patterns`` section
1540 3. configurations in ``merge-patterns`` section
1541 4. configuration of ``ui.merge``
1541 4. configuration of ``ui.merge``
1542 5. configurations in ``merge-tools`` section
1542 5. configurations in ``merge-tools`` section
1543 6. ``hgmerge`` tool (for historical reasons only)
1544 7. default tool for fallback (``:merge`` or ``:prompt``)
1544 7. default tool for fallback (``:merge`` or ``:prompt``)
1545
1545
1546 This command writes out the examination results in the style below::
1547
1547
1548 FILE = MERGETOOL
1548 FILE = MERGETOOL
1549
1549
1550 By default, all files known in the first parent context of the
1551 working directory are examined. Use file patterns and/or -I/-X
1552 options to limit target files. -r/--rev is also useful to examine
1553 files in another context without actually updating to it.
1554
1555 With --debug, this command also shows the warning messages emitted
1556 while matching against ``merge-patterns`` and so on. It is recommended
1557 to use this option with explicit file patterns and/or -I/-X options,
1558 because this option increases the amount of output per file according
1559 to the configuration in hgrc.
1560
1561 With -v/--verbose, this command first shows the configurations below
1562 (only those that are actually specified).
1563
1563
1564 - ``--tool`` option
1564 - ``--tool`` option
1565 - ``HGMERGE`` environment variable
1565 - ``HGMERGE`` environment variable
1566 - configuration of ``ui.merge``
1566 - configuration of ``ui.merge``
1567
1567
1568 If a merge tool is chosen before matching against
1569 ``merge-patterns``, this command can't show any helpful
1570 information, even with --debug. In such a case, the information
1571 above is useful for understanding why that merge tool was chosen.
1572 """
1572 """
1573 opts = pycompat.byteskwargs(opts)
1573 opts = pycompat.byteskwargs(opts)
1574 overrides = {}
1574 overrides = {}
1575 if opts['tool']:
1575 if opts['tool']:
1576 overrides[('ui', 'forcemerge')] = opts['tool']
1576 overrides[('ui', 'forcemerge')] = opts['tool']
1577 ui.note(('with --tool %r\n') % (opts['tool']))
1577 ui.note(('with --tool %r\n') % (opts['tool']))
1578
1578
1579 with ui.configoverride(overrides, 'debugmergepatterns'):
1579 with ui.configoverride(overrides, 'debugmergepatterns'):
1580 hgmerge = encoding.environ.get("HGMERGE")
1580 hgmerge = encoding.environ.get("HGMERGE")
1581 if hgmerge is not None:
1581 if hgmerge is not None:
1582 ui.note(('with HGMERGE=%r\n') % (hgmerge))
1582 ui.note(('with HGMERGE=%r\n') % (hgmerge))
1583 uimerge = ui.config("ui", "merge")
1583 uimerge = ui.config("ui", "merge")
1584 if uimerge:
1584 if uimerge:
1585 ui.note(('with ui.merge=%r\n') % (uimerge))
1585 ui.note(('with ui.merge=%r\n') % (uimerge))
1586
1586
1587 ctx = scmutil.revsingle(repo, opts.get('rev'))
1587 ctx = scmutil.revsingle(repo, opts.get('rev'))
1588 m = scmutil.match(ctx, pats, opts)
1588 m = scmutil.match(ctx, pats, opts)
1589 changedelete = opts['changedelete']
1589 changedelete = opts['changedelete']
1590 for path in ctx.walk(m):
1590 for path in ctx.walk(m):
1591 fctx = ctx[path]
1591 fctx = ctx[path]
1592 try:
1592 try:
1593 if not ui.debugflag:
1593 if not ui.debugflag:
1594 ui.pushbuffer(error=True)
1594 ui.pushbuffer(error=True)
1595 tool, toolpath = filemerge._picktool(repo, ui, path,
1595 tool, toolpath = filemerge._picktool(repo, ui, path,
1596 fctx.isbinary(),
1596 fctx.isbinary(),
1597 'l' in fctx.flags(),
1597 'l' in fctx.flags(),
1598 changedelete)
1598 changedelete)
1599 finally:
1599 finally:
1600 if not ui.debugflag:
1600 if not ui.debugflag:
1601 ui.popbuffer()
1601 ui.popbuffer()
1602 ui.write(('%s = %s\n') % (path, tool))
1602 ui.write(('%s = %s\n') % (path, tool))
1603
1603
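# Editor's note: an illustrative run matching the "FILE = MERGETOOL" style
# described in the docstring. The pattern, file and tool names are
# placeholders, and the reported tool will vary with local configuration.
#
#   $ hg debugpickmergetool -r . 'glob:**.c'
#   src/main.c = :merge
#   $ hg debugpickmergetool --tool kdiff3 'glob:**.c'
#   src/main.c = kdiff3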
1604 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1604 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1605 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1605 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1606 '''access the pushkey key/value protocol
1606 '''access the pushkey key/value protocol
1607
1607
1608 With two args, list the keys in the given namespace.
1608 With two args, list the keys in the given namespace.
1609
1609
1610 With five args, set a key to new if it currently is set to old.
1610 With five args, set a key to new if it currently is set to old.
1611 Reports success or failure.
1611 Reports success or failure.
1612 '''
1612 '''
1613
1613
1614 target = hg.peer(ui, {}, repopath)
1614 target = hg.peer(ui, {}, repopath)
1615 if keyinfo:
1615 if keyinfo:
1616 key, old, new = keyinfo
1616 key, old, new = keyinfo
1617 r = target.pushkey(namespace, key, old, new)
1617 r = target.pushkey(namespace, key, old, new)
1618 ui.status(str(r) + '\n')
1618 ui.status(str(r) + '\n')
1619 return not r
1619 return not r
1620 else:
1620 else:
1621 for k, v in sorted(target.listkeys(namespace).iteritems()):
1621 for k, v in sorted(target.listkeys(namespace).iteritems()):
1622 ui.write("%s\t%s\n" % (util.escapestr(k),
1622 ui.write("%s\t%s\n" % (util.escapestr(k),
1623 util.escapestr(v)))
1623 util.escapestr(v)))
1624
1624
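# Editor's note: hedged examples of the two calling conventions described in
# the docstring. The repository URL is a placeholder, and 'bookmarks' is
# assumed to be one of the available pushkey namespaces on the target.
#
#   $ hg debugpushkey https://example.com/repo namespaces      # list keys
#   $ hg debugpushkey https://example.com/repo bookmarks \
#         mybook '' 0123456789abcdef0123456789abcdef01234567   # set if unset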
1625 @command('debugpvec', [], _('A B'))
1625 @command('debugpvec', [], _('A B'))
1626 def debugpvec(ui, repo, a, b=None):
1626 def debugpvec(ui, repo, a, b=None):
1627 ca = scmutil.revsingle(repo, a)
1627 ca = scmutil.revsingle(repo, a)
1628 cb = scmutil.revsingle(repo, b)
1628 cb = scmutil.revsingle(repo, b)
1629 pa = pvec.ctxpvec(ca)
1629 pa = pvec.ctxpvec(ca)
1630 pb = pvec.ctxpvec(cb)
1630 pb = pvec.ctxpvec(cb)
1631 if pa == pb:
1631 if pa == pb:
1632 rel = "="
1632 rel = "="
1633 elif pa > pb:
1633 elif pa > pb:
1634 rel = ">"
1634 rel = ">"
1635 elif pa < pb:
1635 elif pa < pb:
1636 rel = "<"
1636 rel = "<"
1637 elif pa | pb:
1637 elif pa | pb:
1638 rel = "|"
1638 rel = "|"
1639 ui.write(_("a: %s\n") % pa)
1639 ui.write(_("a: %s\n") % pa)
1640 ui.write(_("b: %s\n") % pb)
1640 ui.write(_("b: %s\n") % pb)
1641 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1641 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1642 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1642 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1643 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1643 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1644 pa.distance(pb), rel))
1644 pa.distance(pb), rel))
1645
1645
1646 @command('debugrebuilddirstate|debugrebuildstate',
1646 @command('debugrebuilddirstate|debugrebuildstate',
1647 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1647 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1648 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1648 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1649 'the working copy parent')),
1649 'the working copy parent')),
1650 ],
1650 ],
1651 _('[-r REV]'))
1651 _('[-r REV]'))
1652 def debugrebuilddirstate(ui, repo, rev, **opts):
1652 def debugrebuilddirstate(ui, repo, rev, **opts):
1653 """rebuild the dirstate as it would look like for the given revision
1653 """rebuild the dirstate as it would look like for the given revision
1654
1654
1655 If no revision is specified the first current parent will be used.
1655 If no revision is specified the first current parent will be used.
1656
1656
1657 The dirstate will be set to the files of the given revision.
1657 The dirstate will be set to the files of the given revision.
1658 The actual working directory content or existing dirstate
1658 The actual working directory content or existing dirstate
1659 information such as adds or removes is not considered.
1659 information such as adds or removes is not considered.
1660
1660
1661 ``minimal`` will only rebuild the dirstate status for files that claim to be
1661 ``minimal`` will only rebuild the dirstate status for files that claim to be
1662 tracked but are not in the parent manifest, or that exist in the parent
1662 tracked but are not in the parent manifest, or that exist in the parent
1663 manifest but are not in the dirstate. It will not change adds, removes, or
1663 manifest but are not in the dirstate. It will not change adds, removes, or
1664 modified files that are in the working copy parent.
1664 modified files that are in the working copy parent.
1665
1665
1666 One use of this command is to make the next :hg:`status` invocation
1666 One use of this command is to make the next :hg:`status` invocation
1667 check the actual file content.
1667 check the actual file content.
1668 """
1668 """
1669 ctx = scmutil.revsingle(repo, rev)
1669 ctx = scmutil.revsingle(repo, rev)
1670 with repo.wlock():
1670 with repo.wlock():
1671 dirstate = repo.dirstate
1671 dirstate = repo.dirstate
1672 changedfiles = None
1672 changedfiles = None
1673 # See command doc for what minimal does.
1673 # See command doc for what minimal does.
1674 if opts.get(r'minimal'):
1674 if opts.get(r'minimal'):
1675 manifestfiles = set(ctx.manifest().keys())
1675 manifestfiles = set(ctx.manifest().keys())
1676 dirstatefiles = set(dirstate)
1676 dirstatefiles = set(dirstate)
1677 manifestonly = manifestfiles - dirstatefiles
1677 manifestonly = manifestfiles - dirstatefiles
1678 dsonly = dirstatefiles - manifestfiles
1678 dsonly = dirstatefiles - manifestfiles
1679 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1679 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1680 changedfiles = manifestonly | dsnotadded
1680 changedfiles = manifestonly | dsnotadded
1681
1681
1682 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1682 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1683
1683
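# Editor's note: illustrative invocations using only the options defined
# above.
#
#   $ hg debugrebuilddirstate -r .          # reset dirstate to the wd parent
#   $ hg debugrebuilddirstate --minimal     # only fix inconsistent entries
#
# A subsequent 'hg status' will then re-check actual file content, as noted
# in the docstring.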
1684 @command('debugrebuildfncache', [], '')
1684 @command('debugrebuildfncache', [], '')
1685 def debugrebuildfncache(ui, repo):
1685 def debugrebuildfncache(ui, repo):
1686 """rebuild the fncache file"""
1686 """rebuild the fncache file"""
1687 repair.rebuildfncache(ui, repo)
1687 repair.rebuildfncache(ui, repo)
1688
1688
1689 @command('debugrename',
1689 @command('debugrename',
1690 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1690 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1691 _('[-r REV] FILE'))
1691 _('[-r REV] FILE'))
1692 def debugrename(ui, repo, file1, *pats, **opts):
1692 def debugrename(ui, repo, file1, *pats, **opts):
1693 """dump rename information"""
1693 """dump rename information"""
1694
1694
1695 opts = pycompat.byteskwargs(opts)
1695 opts = pycompat.byteskwargs(opts)
1696 ctx = scmutil.revsingle(repo, opts.get('rev'))
1696 ctx = scmutil.revsingle(repo, opts.get('rev'))
1697 m = scmutil.match(ctx, (file1,) + pats, opts)
1697 m = scmutil.match(ctx, (file1,) + pats, opts)
1698 for abs in ctx.walk(m):
1698 for abs in ctx.walk(m):
1699 fctx = ctx[abs]
1699 fctx = ctx[abs]
1700 o = fctx.filelog().renamed(fctx.filenode())
1700 o = fctx.filelog().renamed(fctx.filenode())
1701 rel = m.rel(abs)
1701 rel = m.rel(abs)
1702 if o:
1702 if o:
1703 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1703 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1704 else:
1704 else:
1705 ui.write(_("%s not renamed\n") % rel)
1705 ui.write(_("%s not renamed\n") % rel)
1706
1706
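# Editor's note: a sketch of typical debugrename output based on the two
# ui.write() branches above; the file names and the hash are placeholders.
#
#   $ hg debugrename -r tip copied.txt plain.txt
#   copied.txt renamed from original.txt:6c3f2b8c1a24f1c0b2ad4f0e9d8c7b6a5e4d3c2b
#   plain.txt not renamed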
1707 @command('debugrevlog', cmdutil.debugrevlogopts +
1707 @command('debugrevlog', cmdutil.debugrevlogopts +
1708 [('d', 'dump', False, _('dump index data'))],
1708 [('d', 'dump', False, _('dump index data'))],
1709 _('-c|-m|FILE'),
1709 _('-c|-m|FILE'),
1710 optionalrepo=True)
1710 optionalrepo=True)
1711 def debugrevlog(ui, repo, file_=None, **opts):
1711 def debugrevlog(ui, repo, file_=None, **opts):
1712 """show data and statistics about a revlog"""
1712 """show data and statistics about a revlog"""
1713 opts = pycompat.byteskwargs(opts)
1713 opts = pycompat.byteskwargs(opts)
1714 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1714 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1715
1715
1716 if opts.get("dump"):
1716 if opts.get("dump"):
1717 numrevs = len(r)
1717 numrevs = len(r)
1718 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1718 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1719 " rawsize totalsize compression heads chainlen\n"))
1719 " rawsize totalsize compression heads chainlen\n"))
1720 ts = 0
1720 ts = 0
1721 heads = set()
1721 heads = set()
1722
1722
1723 for rev in xrange(numrevs):
1723 for rev in xrange(numrevs):
1724 dbase = r.deltaparent(rev)
1724 dbase = r.deltaparent(rev)
1725 if dbase == -1:
1725 if dbase == -1:
1726 dbase = rev
1726 dbase = rev
1727 cbase = r.chainbase(rev)
1727 cbase = r.chainbase(rev)
1728 clen = r.chainlen(rev)
1728 clen = r.chainlen(rev)
1729 p1, p2 = r.parentrevs(rev)
1729 p1, p2 = r.parentrevs(rev)
1730 rs = r.rawsize(rev)
1730 rs = r.rawsize(rev)
1731 ts = ts + rs
1731 ts = ts + rs
1732 heads -= set(r.parentrevs(rev))
1732 heads -= set(r.parentrevs(rev))
1733 heads.add(rev)
1733 heads.add(rev)
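# Editorial note: the ratio below compares the raw bytes seen so far (ts)
# with the space consumed in the revlog up to this revision (r.end(rev)),
# i.e. a running compression ratio; the guard covers a zero stored size.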
1734 try:
1735 compression = ts / r.end(rev)
1736 except ZeroDivisionError:
1737 compression = 0
1738 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1739 "%11d %5d %8d\n" %
1740 (rev, p1, p2, r.start(rev), r.end(rev),
1741 r.start(dbase), r.start(cbase),
1742 r.start(p1), r.start(p2),
1743 rs, ts, compression, len(heads), clen))
1744 return 0
1745
1746 v = r.version
1747 format = v & 0xFFFF
1748 flags = []
1749 gdelta = False
1750 if v & revlog.FLAG_INLINE_DATA:
1751 flags.append('inline')
1752 if v & revlog.FLAG_GENERALDELTA:
1753 gdelta = True
1754 flags.append('generaldelta')
1755 if not flags:
1756 flags = ['(none)']
1757
1758 nummerges = 0
1759 numfull = 0
1760 numprev = 0
1761 nump1 = 0
1762 nump2 = 0
1763 numother = 0
1764 nump1prev = 0
1765 nump2prev = 0
1766 chainlengths = []
1767 chainbases = []
1768 chainspans = []
1769
1770 datasize = [None, 0, 0]
1771 fullsize = [None, 0, 0]
1772 deltasize = [None, 0, 0]
1773 chunktypecounts = {}
1774 chunktypesizes = {}
1775
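# Editorial note: addsize() below accumulates [min, max, total] for one size
# class; the min slot starts as None so the first sample always initializes it.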
1776 def addsize(size, l):
1777 if l[0] is None or size < l[0]:
1778 l[0] = size
1779 if size > l[1]:
1780 l[1] = size
1781 l[2] += size
1782
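# Editorial note: the loop below walks every revision once, classifying it as
# a full snapshot or a delta and recording which base the delta was built on
# (the previous revision, p1, p2, or some other revision).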
1783 numrevs = len(r)
1784 for rev in xrange(numrevs):
1785 p1, p2 = r.parentrevs(rev)
1786 delta = r.deltaparent(rev)
1787 if format > 0:
1788 addsize(r.rawsize(rev), datasize)
1789 if p2 != nullrev:
1790 nummerges += 1
1791 size = r.length(rev)
1792 if delta == nullrev:
1793 chainlengths.append(0)
1794 chainbases.append(r.start(rev))
1795 chainspans.append(size)
1796 numfull += 1
1797 addsize(size, fullsize)
1798 else:
1799 chainlengths.append(chainlengths[delta] + 1)
1800 baseaddr = chainbases[delta]
1801 revaddr = r.start(rev)
1802 chainbases.append(baseaddr)
1803 chainspans.append((revaddr - baseaddr) + size)
1804 addsize(size, deltasize)
1805 if delta == rev - 1:
1806 numprev += 1
1807 if delta == p1:
1808 nump1prev += 1
1809 elif delta == p2:
1810 nump2prev += 1
1811 elif delta == p1:
1812 nump1 += 1
1813 elif delta == p2:
1814 nump2 += 1
1815 elif delta != nullrev:
1816 numother += 1
1817
1818 # Obtain data on the raw chunks in the revlog.
1819 segment = r._getsegmentforrevs(rev, rev)[1]
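# Editorial note: the first byte of a stored chunk identifies its compression
# (e.g. 'u' for uncompressed, 'x' for zlib); an empty segment means the
# revision stores no data at all.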
1820 if segment:
1821 chunktype = bytes(segment[0:1])
1822 else:
1823 chunktype = 'empty'
1824
1825 if chunktype not in chunktypecounts:
1826 chunktypecounts[chunktype] = 0
1827 chunktypesizes[chunktype] = 0
1828
1829 chunktypecounts[chunktype] += 1
1830 chunktypesizes[chunktype] += size
1831
1832 # Adjust size min value for empty cases
1833 for size in (datasize, fullsize, deltasize):
1834 if size[0] is None:
1835 size[0] = 0
1836
1837 numdeltas = numrevs - numfull
1838 numoprev = numprev - nump1prev - nump2prev
1839 totalrawsize = datasize[2]
1840 datasize[2] /= numrevs
1841 fulltotal = fullsize[2]
1842 fullsize[2] /= numfull
1843 deltatotal = deltasize[2]
1844 if numrevs - numfull > 0:
1845 deltasize[2] /= numrevs - numfull
1846 totalsize = fulltotal + deltatotal
1847 avgchainlen = sum(chainlengths) / numrevs
1848 maxchainlen = max(chainlengths)
1849 maxchainspan = max(chainspans)
1850 compratio = 1
1851 if totalsize:
1852 compratio = totalrawsize / totalsize
1853
1854 basedfmtstr = '%%%dd\n'
1855 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
1856
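# Editorial note: the helpers below build format strings whose field width
# matches the widest value to be printed, so the statistics line up in columns.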
1857 def dfmtstr(max):
1858 return basedfmtstr % len(str(max))
1859 def pcfmtstr(max, padding=0):
1860 return basepcfmtstr % (len(str(max)), ' ' * padding)
1861
1862 def pcfmt(value, total):
1863 if total:
1864 return (value, 100 * float(value) / total)
1865 else:
1866 return value, 100.0
1867
1868 ui.write(('format : %d\n') % format)
1869 ui.write(('flags : %s\n') % ', '.join(flags))
1870
1871 ui.write('\n')
1872 fmt = pcfmtstr(totalsize)
1873 fmt2 = dfmtstr(totalsize)
1874 ui.write(('revisions : ') + fmt2 % numrevs)
1875 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
1876 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
1877 ui.write(('revisions : ') + fmt2 % numrevs)
1878 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
1879 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
1880 ui.write(('revision size : ') + fmt2 % totalsize)
1881 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
1882 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
1883
1884 def fmtchunktype(chunktype):
1885 if chunktype == 'empty':
1886 return ' %s : ' % chunktype
1887 elif chunktype in string.ascii_letters:
1887 elif chunktype in pycompat.bytestr(string.ascii_letters):
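# Editorial note on the change above: on Python 3, string.ascii_letters is a
# (unicode) str while chunktype is bytes, so it is wrapped in
# pycompat.bytestr() to make the membership test compare bytes with bytes.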
1888 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
1889 else:
1890 return ' 0x%s : ' % hex(chunktype)
1891
1892 ui.write('\n')
1893 ui.write(('chunks : ') + fmt2 % numrevs)
1894 for chunktype in sorted(chunktypecounts):
1895 ui.write(fmtchunktype(chunktype))
1896 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
1897 ui.write(('chunks size : ') + fmt2 % totalsize)
1898 for chunktype in sorted(chunktypecounts):
1899 ui.write(fmtchunktype(chunktype))
1900 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
1901
1902 ui.write('\n')
1903 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
1904 ui.write(('avg chain length : ') + fmt % avgchainlen)
1905 ui.write(('max chain length : ') + fmt % maxchainlen)
1906 ui.write(('max chain reach : ') + fmt % maxchainspan)
1907 ui.write(('compression ratio : ') + fmt % compratio)
1908
1909 if format > 0:
1910 ui.write('\n')
1911 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
1912 % tuple(datasize))
1913 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
1914 % tuple(fullsize))
1915 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
1916 % tuple(deltasize))
1917
1918 if numdeltas > 0:
1919 ui.write('\n')
1920 fmt = pcfmtstr(numdeltas)
1921 fmt2 = pcfmtstr(numdeltas, 4)
1922 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
1923 if numprev > 0:
1924 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
1925 numprev))
1926 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
1927 numprev))
1928 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
1929 numprev))
1930 if gdelta:
1931 ui.write(('deltas against p1 : ')
1932 + fmt % pcfmt(nump1, numdeltas))
1933 ui.write(('deltas against p2 : ')
1934 + fmt % pcfmt(nump2, numdeltas))
1935 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
1936 numdeltas))
1937
1938 @command('debugrevspec',
1939 [('', 'optimize', None,
1940 _('print parsed tree after optimizing (DEPRECATED)')),
1941 ('', 'show-revs', True, _('print list of result revisions (default)')),
1942 ('s', 'show-set', None, _('print internal representation of result set')),
1943 ('p', 'show-stage', [],
1944 _('print parsed tree at the given stage'), _('NAME')),
1945 ('', 'no-optimized', False, _('evaluate tree without optimization')),
1946 ('', 'verify-optimized', False, _('verify optimized result')),
1947 ],
1948 ('REVSPEC'))
1949 def debugrevspec(ui, repo, expr, **opts):
1950 """parse and apply a revision specification
1951
1952 Use -p/--show-stage option to print the parsed tree at the given stages.
1953 Use -p all to print tree at every stage.
1954
1955 Use --no-show-revs option with -s or -p to print only the set
1956 representation or the parsed tree respectively.
1957
1958 Use --verify-optimized to compare the optimized result with the unoptimized
1959 one. Returns 1 if the optimized result differs.
1960 """
1961 opts = pycompat.byteskwargs(opts)
1962 stages = [
1963 ('parsed', lambda tree: tree),
1964 ('expanded', lambda tree: revsetlang.expandaliases(ui, tree)),
1965 ('concatenated', revsetlang.foldconcat),
1966 ('analyzed', revsetlang.analyze),
1967 ('optimized', revsetlang.optimize),
1968 ]
1969 if opts['no_optimized']:
1970 stages = stages[:-1]
1971 if opts['verify_optimized'] and opts['no_optimized']:
1972 raise error.Abort(_('cannot use --verify-optimized with '
1973 '--no-optimized'))
1974 stagenames = set(n for n, f in stages)
1975
1976 showalways = set()
1977 showchanged = set()
1978 if ui.verbose and not opts['show_stage']:
1979 # show parsed tree by --verbose (deprecated)
1980 showalways.add('parsed')
1981 showchanged.update(['expanded', 'concatenated'])
1982 if opts['optimize']:
1983 showalways.add('optimized')
1984 if opts['show_stage'] and opts['optimize']:
1985 raise error.Abort(_('cannot use --optimize with --show-stage'))
1986 if opts['show_stage'] == ['all']:
1987 showalways.update(stagenames)
1988 else:
1989 for n in opts['show_stage']:
1990 if n not in stagenames:
1991 raise error.Abort(_('invalid stage name: %s') % n)
1992 showalways.update(opts['show_stage'])
1993
1994 treebystage = {}
1995 printedtree = None
1996 tree = revsetlang.parse(expr, lookup=repo.__contains__)
1997 for n, f in stages:
1998 treebystage[n] = tree = f(tree)
1999 if n in showalways or (n in showchanged and tree != printedtree):
2000 if opts['show_stage'] or n != 'parsed':
2001 ui.write(("* %s:\n") % n)
2002 ui.write(revsetlang.prettyformat(tree), "\n")
2003 printedtree = tree
2004
2005 if opts['verify_optimized']:
2006 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2007 brevs = revset.makematcher(treebystage['optimized'])(repo)
2008 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2009 ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
2010 ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
2011 arevs = list(arevs)
2012 brevs = list(brevs)
2013 if arevs == brevs:
2014 return 0
2015 ui.write(('--- analyzed\n'), label='diff.file_a')
2016 ui.write(('+++ optimized\n'), label='diff.file_b')
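# Editorial note: the block below emits a diff-style listing of how the
# analyzed and optimized evaluations differ, reusing the standard diff labels
# so deletions and insertions are colored like a normal diff.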
2017 sm = difflib.SequenceMatcher(None, arevs, brevs)
2018 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2019 if tag in ('delete', 'replace'):
2020 for c in arevs[alo:ahi]:
2021 ui.write('-%s\n' % c, label='diff.deleted')
2022 if tag in ('insert', 'replace'):
2023 for c in brevs[blo:bhi]:
2024 ui.write('+%s\n' % c, label='diff.inserted')
2025 if tag == 'equal':
2026 for c in arevs[alo:ahi]:
2027 ui.write(' %s\n' % c)
2028 return 1
2029
2030 func = revset.makematcher(tree)
2031 revs = func(repo)
2032 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2033 ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
2034 if not opts['show_revs']:
2035 return
2036 for c in revs:
2037 ui.write("%s\n" % c)
2038
2039 @command('debugsetparents', [], _('REV1 [REV2]'))
2040 def debugsetparents(ui, repo, rev1, rev2=None):
2041 """manually set the parents of the current working directory
2042
2043 This is useful for writing repository conversion tools, but should
2044 be used with care. For example, neither the working directory nor the
2045 dirstate is updated, so file status may be incorrect after running this
2046 command.
2047
2048 Returns 0 on success.
2049 """
2050
2051 r1 = scmutil.revsingle(repo, rev1).node()
2052 r2 = scmutil.revsingle(repo, rev2, 'null').node()
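# Editorial note: when REV2 is omitted it resolves to the null revision, so
# the second parent is cleared rather than left unchanged.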
2053
2054 with repo.wlock():
2055 repo.setparents(r1, r2)
2056
2057 @command('debugsub',
2058 [('r', 'rev', '',
2059 _('revision to check'), _('REV'))],
2060 _('[-r REV] [REV]'))
2061 def debugsub(ui, repo, rev=None):
2062 ctx = scmutil.revsingle(repo, rev, None)
2063 for k, v in sorted(ctx.substate.items()):
2064 ui.write(('path %s\n') % k)
2065 ui.write((' source %s\n') % v[0])
2066 ui.write((' revision %s\n') % v[1])
2067
2068 @command('debugsuccessorssets',
2069 [],
2070 _('[REV]'))
2071 def debugsuccessorssets(ui, repo, *revs):
2072 """show set of successors for revision
2073
2074 A successors set of changeset A is a consistent group of revisions that
2075 succeed A. It contains non-obsolete changesets only.
2076
2077 In most cases a changeset A has a single successors set containing a single
2078 successor (changeset A replaced by A').
2079
2080 A changeset that is made obsolete with no successors is called "pruned".
2081 Such changesets have no successors sets at all.
2082
2083 A changeset that has been "split" will have a successors set containing
2084 more than one successor.
2085
2086 A changeset that has been rewritten in multiple different ways is called
2087 "divergent". Such changesets have multiple successor sets (each of which
2088 may also be split, i.e. have multiple successors).
2089
2090 Results are displayed as follows::
2091
2092 <rev1>
2093 <successors-1A>
2094 <rev2>
2095 <successors-2A>
2096 <successors-2B1> <successors-2B2> <successors-2B3>
2097
2098 Here rev2 has two possible (i.e. divergent) successors sets. The first
2099 holds one element, whereas the second holds three (i.e. the changeset has
2100 been split).
2101 """
2102 # passed to successorssets caching computation from one call to another
2103 cache = {}
2104 ctx2str = str
2105 node2str = short
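# Editorial note: under --debug the branch below switches to full 40-digit
# hexadecimal hashes instead of the short forms used by default.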
2106 if ui.debug():
2107 def ctx2str(ctx):
2108 return ctx.hex()
2109 node2str = hex
2110 for rev in scmutil.revrange(repo, revs):
2111 ctx = repo[rev]
2112 ui.write('%s\n'% ctx2str(ctx))
2113 for succsset in obsolete.successorssets(repo, ctx.node(), cache):
2114 if succsset:
2115 ui.write(' ')
2116 ui.write(node2str(succsset[0]))
2117 for node in succsset[1:]:
2118 ui.write(' ')
2119 ui.write(node2str(node))
2120 ui.write('\n')
2121
2122 @command('debugtemplate',
2123 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2124 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2125 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2126 optionalrepo=True)
2127 def debugtemplate(ui, repo, tmpl, **opts):
2128 """parse and apply a template
2129
2130 If -r/--rev is given, the template is processed as a log template and
2131 applied to the given changesets. Otherwise, it is processed as a generic
2132 template.
2133
2134 Use --verbose to print the parsed tree.
2135 """
2136 revs = None
2137 if opts[r'rev']:
2138 if repo is None:
2139 raise error.RepoError(_('there is no Mercurial repository here '
2140 '(.hg not found)'))
2141 revs = scmutil.revrange(repo, opts[r'rev'])
2142
2143 props = {}
2144 for d in opts[r'define']:
2145 try:
2146 k, v = (e.strip() for e in d.split('=', 1))
2147 if not k or k == 'ui':
2148 raise ValueError
2149 props[k] = v
2150 except ValueError:
2151 raise error.Abort(_('malformed keyword definition: %s') % d)
2152
2153 if ui.verbose:
2154 aliases = ui.configitems('templatealias')
2155 tree = templater.parse(tmpl)
2156 ui.note(templater.prettyformat(tree), '\n')
2157 newtree = templater.expandaliases(tree, aliases)
2158 if newtree != tree:
2159 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2160
2161 if revs is None:
2162 t = formatter.maketemplater(ui, tmpl)
2163 props['ui'] = ui
2164 ui.write(t.render(props))
2165 else:
2166 displayer = cmdutil.makelogtemplater(ui, repo, tmpl)
2167 for r in revs:
2168 displayer.show(repo[r], **pycompat.strkwargs(props))
2169 displayer.close()
2170
2171 @command('debugupdatecaches', [])
2172 def debugupdatecaches(ui, repo, *pats, **opts):
2173 """warm all known caches in the repository"""
2174 with repo.wlock():
2175 with repo.lock():
2176 repo.updatecaches()
2177
2178 @command('debugupgraderepo', [
2179 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2180 ('', 'run', False, _('performs an upgrade')),
2181 ])
2182 def debugupgraderepo(ui, repo, run=False, optimize=None):
2183 """upgrade a repository to use different features
2184
2185 If no arguments are specified, the repository is evaluated for upgrade
2186 and a list of problems and potential optimizations is printed.
2187
2188 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2189 can be influenced via additional arguments. More details will be provided
2190 by the command output when run without ``--run``.
2191
2192 During the upgrade, the repository will be locked and no writes will be
2193 allowed.
2194
2195 At the end of the upgrade, the repository may not be readable while new
2196 repository data is swapped in. This window will be as long as it takes to
2197 rename some directories inside the ``.hg`` directory. On most machines, this
2198 should complete almost instantaneously and the chances of a consumer being
2199 unable to access the repository should be low.
2200 """
2201 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2202
2203 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2204 inferrepo=True)
2205 def debugwalk(ui, repo, *pats, **opts):
2206 """show how files match on given patterns"""
2207 opts = pycompat.byteskwargs(opts)
2208 m = scmutil.match(repo[None], pats, opts)
2209 ui.write(('matcher: %r\n' % m))
2210 items = list(repo[None].walk(m))
2211 if not items:
2212 return
2213 f = lambda fn: fn
2214 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2215 f = lambda fn: util.normpath(fn)
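# Editorial note: the format string below sizes the two path columns to the
# longest absolute and relative names so the output lines up.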
2216 fmt = 'f %%-%ds %%-%ds %%s' % (
2217 max([len(abs) for abs in items]),
2218 max([len(m.rel(abs)) for abs in items]))
2219 for abs in items:
2220 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2221 ui.write("%s\n" % line.rstrip())
2222
2223 @command('debugwireargs',
2224 [('', 'three', '', 'three'),
2225 ('', 'four', '', 'four'),
2226 ('', 'five', '', 'five'),
2227 ] + cmdutil.remoteopts,
2228 _('REPO [OPTIONS]... [ONE [TWO]]'),
2229 norepo=True)
2230 def debugwireargs(ui, repopath, *vals, **opts):
2231 opts = pycompat.byteskwargs(opts)
2232 repo = hg.peer(ui, opts, repopath)
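# Editorial note: the loop below strips the generic remote options added via
# cmdutil.remoteopts so only this command's own arguments are forwarded to
# the peer.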
2233 for opt in cmdutil.remoteopts:
2234 del opts[opt[1]]
2235 args = {}
2236 for k, v in opts.iteritems():
2237 if v:
2238 args[k] = v
2239 # run twice to check that we don't mess up the stream for the next command
2240 res1 = repo.debugwireargs(*vals, **args)
2241 res2 = repo.debugwireargs(*vals, **args)
2242 ui.write("%s\n" % res1)
2243 if res1 != res2:
2244 ui.warn("%s\n" % res2)