debugssl: allow a URL to be specified without a local repository...
Matt Harbison
r34031:3c306636 stable
@@ -1,2311 +1,2311 @@
# debugcommands.py - command processing for debug* commands
#
# Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import difflib
import errno
import operator
import os
import random
import socket
import ssl
import string
import sys
import tempfile
import time

from .i18n import _
from .node import (
    bin,
    hex,
    nullhex,
    nullid,
    nullrev,
    short,
)
from . import (
    bundle2,
    changegroup,
    cmdutil,
    color,
    context,
    dagparser,
    dagutil,
    encoding,
    error,
    exchange,
    extensions,
    filemerge,
    fileset,
    formatter,
    hg,
    localrepo,
    lock as lockmod,
    merge as mergemod,
    obsolete,
    obsutil,
    phases,
    policy,
    pvec,
    pycompat,
    registrar,
    repair,
    revlog,
    revset,
    revsetlang,
    scmutil,
    setdiscovery,
    simplemerge,
    smartset,
    sslutil,
    streamclone,
    templater,
    treediscovery,
    upgrade,
    util,
    vfs as vfsmod,
)

release = lockmod.release

command = registrar.command()

@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    if len(args) == 3:
        index, rev1, rev2 = args
        r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
        lookup = r.lookup
    elif len(args) == 2:
        if not repo:
            raise error.Abort(_('there is no Mercurial repository here '
                                '(.hg not found)'))
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_('either two or three arguments required'))
    a = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write('%d:%s\n' % (r.rev(a), hex(a)))

@command('debugapplystreamclonebundle', [], 'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    f = hg.openpath(ui, fname)
    gen = exchange.readbundle(ui, f, fname)
    gen.apply(repo)

@command('debugbuilddag',
    [('m', 'mergeable-file', None, _('add single file mergeable changes')),
    ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
    ('n', 'new-file', None, _('add new file at each rev'))],
    _('[OPTION]... [TEXT]'))
def debugbuilddag(ui, repo, text=None,
                  mergeable_file=False,
                  overwritten_file=False,
                  new_file=False):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """
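    # An illustrative DAG text (not from the original source), built from the
    # grammar documented above:
    #
    #     +3 :mytag *2 /mytag
    #
    # creates three linear nodes, tags the last one 'mytag', forks a new node
    # off the node two back, and finally merges that fork with 'mytag'.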

    if text is None:
        ui.status(_("reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_('repository is not empty'))

    # determine number of revs in DAG
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == 'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
        initialmergedlines.append("")

    tags = []

    wlock = lock = tr = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        tr = repo.transaction("builddag")

        at = -1
        atbranch = 'default'
        nodeids = []
        id = 0
        ui.progress(_('building'), id, unit=_('revisions'), total=total)
        for type, data in dagparser.parsedag(text):
            if type == 'n':
                ui.note(('node %s\n' % str(data)))
                id, ps = data

                files = []
                fctxs = {}

                p2 = None
                if mergeable_file:
                    fn = "mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [x[fn].data() for x in (pa, p1,
                                                                     p2)]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append("")
                    elif at > 0:
                        ml = p1[fn].data().split("\n")
                    else:
                        ml = initialmergedlines
                    ml[id * linesperrev] += " r%i" % id
                    mergedtext = "\n".join(ml)
                    files.append(fn)
                    fctxs[fn] = context.memfilectx(repo, fn, mergedtext)

                if overwritten_file:
                    fn = "of"
                    files.append(fn)
                    fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)

                if new_file:
                    fn = "nf%i" % id
                    files.append(fn)
                    fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith("nf"):
                                files.append(fn)
                                fctxs[fn] = p2[fn]

                def fctxfn(repo, cx, path):
                    return fctxs.get(path)

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
                                    date=(id, 0),
                                    user="debugbuilddag",
                                    extra={'branch': atbranch})
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == 'l':
                id, name = data
                ui.note(('tag %s\n' % name))
                tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == 'a':
                ui.note(('branch %s\n' % data))
                atbranch = data
            ui.progress(_('building'), id, unit=_('revisions'), total=total)
        tr.close()

        if tags:
            repo.vfs.write("localtags", "".join(tags))
    finally:
        ui.progress(_('building'), None)
        release(tr, lock, wlock)

def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    indent_string = ' ' * indent
    if all:
        ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
                 % indent_string)

        def showchunks(named):
            ui.write("\n%s%s\n" % (indent_string, named))
            chain = None
            for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
                node = chunkdata['node']
                p1 = chunkdata['p1']
                p2 = chunkdata['p2']
                cs = chunkdata['cs']
                deltabase = chunkdata['deltabase']
                delta = chunkdata['delta']
                ui.write("%s%s %s %s %s %s %s\n" %
                         (indent_string, hex(node), hex(p1), hex(p2),
                          hex(cs), hex(deltabase), len(delta)))
                chain = node

        chunkdata = gen.changelogheader()
        showchunks("changelog")
        chunkdata = gen.manifestheader()
        showchunks("manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata['filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_('use debugbundle2 for this file'))
        chunkdata = gen.changelogheader()
        chain = None
        for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
            node = chunkdata['node']
            ui.write("%s%s\n" % (indent_string, hex(node)))
            chain = node

def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = ' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        msg = "%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = "%sversion: %s (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        fm = ui.formatter('debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()

def _debugphaseheads(ui, data, indent=0):
    """display version and markers contained in 'data'"""
    indent_string = ' ' * indent
    headsbyphase = bundle2._readphaseheads(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(indent_string)
            ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))

def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_('not a bundle2 file'))
    ui.write(('Stream params: %s\n' % repr(gen.params)))
    parttypes = opts.get(r'part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        ui.write('%s -- %r\n' % (part.type, repr(part.params)))
        if part.type == 'changegroup':
            version = part.params.get('version', '01')
            cg = changegroup.getunbundler(version, part, 'UN')
            _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == 'obsmarkers':
            _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == 'phase-heads':
            _debugphaseheads(ui, part, indent=4)

@command('debugbundle',
    [('a', 'all', None, _('show all details')),
    ('', 'part-type', [], _('show only the named part type')),
    ('', 'spec', None, _('print the bundlespec of the bundle'))],
    _('FILE'),
    norepo=True)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            spec = exchange.getbundlespec(ui, f)
            ui.write('%s\n' % spec)
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)

@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        error = _(".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(error)

@command('debugcolor',
    [('', 'style', None, _('show all configured styles'))],
    'hg debugcolor')
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.write(('color mode: %s\n') % ui._colormode)
    if opts.get(r'style'):
        return _debugdisplaystyle(ui)
    else:
        return _debugdisplaycolor(ui)

def _debugdisplaycolor(ui):
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for k, v in ui.configitems('color'):
            if k.startswith('color.'):
                ui._styles[k] = k[6:]
            elif k.startswith('terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_('available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(),
                   key=lambda i: ('_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(('%s\n') % colorname, label=label)

def _debugdisplaystyle(ui):
    ui.write(_('available style:\n'))
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write('%s' % label, label=label)
        if effects:
            # 50
            ui.write(': ')
            ui.write(' ' * (max(0, width - len(label))))
            ui.write(', '.join(ui.label(e, e) for e in effects.split()))
        ui.write('\n')

@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(_('(warning: stream clone bundle will contain secret '
                  'revisions)\n'))

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))

@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
    ('b', 'branches', None, _('annotate with branch names')),
    ('', 'dots', None, _('use dots for runs')),
    ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
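    # Note (not from the original source): the emitted text uses the same
    # element grammar that 'debugbuilddag' documents above, e.g. a linear run
    # of n changesets appears as "+n" and labels appear as ":tag".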
    spaces = opts.get(r'spaces')
    dots = opts.get(r'dots')
    if file_:
        rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                             file_)
        revs = set((int(r) for r in revs))
        def events():
            for r in rlog:
                yield 'n', (r, list(p for p in rlog.parentrevs(r)
                                    if p != -1))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        cl = repo.changelog
        tags = opts.get(r'tags')
        branches = opts.get(r'branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            b = "default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(p for p in cl.parentrevs(r)
                                    if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
        ui.write("\n")

@command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
    try:
        ui.write(r.revision(r.lookup(rev), raw=True))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)

@command('debugdate',
    [('e', 'extended', None, _('try extended date formats'))],
    _('[-e] DATE [RANGE]'),
    norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts[r"extended"]:
        d = util.parsedate(date, util.extendeddateformats)
    else:
        d = util.parsedate(date)
    ui.write(("internal: %s %s\n") % d)
    ui.write(("standard: %s\n") % util.datestr(d))
    if range:
        m = util.matchdate(range)
        ui.write(("match: %s\n") % m(d[0]))

@command('debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain
    """
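    # Illustrative example (not from the original source): since the command
    # table above includes the generic formatter options, the keywords can be
    # used in a template, e.g.
    #     hg debugdeltachain -m -T '{rev} {chainid} {chainlen} {deltatype}\n'
    # lists the chain id, chain length and delta type of every manifest
    # revision.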
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    generaldelta = r.version & revlog.FLAG_GENERALDELTA

    def revinfo(rev):
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            if e[3] == e[5]:
                deltatype = 'p1'
            elif e[3] == e[6]:
                deltatype = 'p2'
            elif e[3] == rev - 1:
                deltatype = 'prev'
            elif e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            if e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    fm.plain(' rev chain# chainlen prev delta '
             'size rawsize chainsize ratio lindist extradist '
             'extraratio\n')

    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = r.start(chainbase)
        revstart = r.start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        chainratio = float(chainsize) / float(uncomp)
        extraratio = float(extradist) / float(chainsize)

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f\n',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)

    fm.end()

@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime')),
    ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = opts.get(r'nodates')
    datesort = opts.get(r'datesort')

    timestr = ""
    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        if ent[3] == -1:
            timestr = 'unset '
        elif nodates:
            timestr = 'set '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
                                    time.localtime(ent[3]))
        if ent[1] & 0o20000:
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))

@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
    ('', 'nonheads', None,
     _('use old-style discovery with non-heads included')),
    ] + cmdutil.remoteopts,
    _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
                                      opts.get('branch'))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(localheads, remoteheads, remote=remote):
        if opts.get('old'):
            if localheads:
                raise error.Abort('cannot use localheads with old style '
                                  'discovery')
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))
                dag = dagutil.revlogdag(repo.changelog)
                all = dag.ancestorset(dag.internalizeall(common))
                common = dag.externalizeall(dag.headsetofconnecteds(all))
        else:
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
        common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
        if lheads <= common:
            ui.write(("local is subset\n"))
        elif rheads <= common:
            ui.write(("remote is subset\n"))

    serverlogs = opts.get('serverlog')
    if serverlogs:
        for filename in serverlogs:
            with open(filename, 'r') as logfile:
                line = logfile.readline()
                while line:
                    parts = line.strip().split(';')
                    op = parts[1]
                    if op == 'cg':
                        pass
                    elif op == 'cgss':
                        doit(parts[2].split(' '), parts[3].split(' '))
                    elif op == 'unb':
                        doit(parts[3].split(' '), parts[2].split(' '))
                    line = logfile.readline()
    else:
        remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
                                                 opts.get('remote_head'))
        localrevs = opts.get('local_head')
        doit(localrevs, remoterevs)

@command('debugextensions', cmdutil.formatteropts, [], norepo=True)
def debugextensions(ui, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter('debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = pycompat.fsencode(extmod.__file__)
        if isinternal:
            exttestedwith = [] # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', '').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write('name', '%s\n', extname)
        else:
            fm.write('name', '%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain('\n')
            elif not exttestedwith:
                fm.plain(_(' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(' (%s!)\n' % lasttestedversion)

        fm.condwrite(ui.verbose and extsource, 'source',
                     _(' location: %s\n'), extsource or "")

        if ui.verbose:
            fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
                     _(' tested with: %s\n'),
                     fm.formatlist(exttestedwith, name='ver'))

        fm.condwrite(ui.verbose and extbuglink, 'buglink',
                     _(' bug reporting: %s\n'), extbuglink or "")

    fm.end()

@command('debugfileset',
    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
    _('[-r REV] FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    ctx = scmutil.revsingle(repo, opts.get(r'rev'), None)
    if ui.verbose:
        tree = fileset.parse(expr)
        ui.note(fileset.prettyformat(tree), "\n")

    for f in ctx.getfileset(expr):
        ui.write("%s\n" % f)

@command('debugfsinfo', [], _('[PATH]'), norepo=True)
def debugfsinfo(ui, path="."):
    """show information detected about current filesystem"""
    ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
    ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
    ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
    ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
    casesensitive = '(unknown)'
    try:
        with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
            casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
    except OSError:
        pass
    ui.write(('case-sensitive: %s\n') % casesensitive)

@command('debuggetbundle',
    [('H', 'head', [], _('id of head node'), _('ID')),
    ('C', 'common', [], _('id of common node'), _('ID')),
    ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
    _('REPO FILE [-H|-C ID]...'),
    norepo=True)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
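    # Illustrative example (not from the original source; the URL and node id
    # are placeholders):
    #     hg debuggetbundle http://example.com/repo out.hg -H <40-hex-node-id>
    # saves a bundle containing that head to 'out.hg', compressed with the
    # default bzip2 type unless --type says otherwise.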
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('getbundle'):
        raise error.Abort("getbundle() not supported by target repository")
    args = {}
    if common:
        args[r'common'] = [bin(s) for s in common]
    if head:
        args[r'heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args[r'bundlecaps'] = None
    bundle = repo.getbundle('debug', **args)

    bundletype = opts.get('type', 'bzip2').lower()
    btypes = {'none': 'HG10UN',
              'bzip2': 'HG10BZ',
              'gzip': 'HG10GZ',
              'bundle2': 'HG20'}
    bundletype = btypes.get(bundletype)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_('unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)

@command('debugignore', [], '[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
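    # Illustrative example (not from the original source; the path, rule and
    # line number are made up):
    #     $ hg debugignore build/foo.o
    #     build/foo.o is ignored
    #     (ignore rule in .hgignore, line 3: 'build/')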
864 ignore = repo.dirstate._ignore
864 ignore = repo.dirstate._ignore
865 if not files:
865 if not files:
866 # Show all the patterns
866 # Show all the patterns
867 ui.write("%s\n" % repr(ignore))
867 ui.write("%s\n" % repr(ignore))
868 else:
868 else:
869 m = scmutil.match(repo[None], pats=files)
869 m = scmutil.match(repo[None], pats=files)
870 for f in m.files():
870 for f in m.files():
871 nf = util.normpath(f)
871 nf = util.normpath(f)
872 ignored = None
872 ignored = None
873 ignoredata = None
873 ignoredata = None
874 if nf != '.':
874 if nf != '.':
875 if ignore(nf):
875 if ignore(nf):
876 ignored = nf
876 ignored = nf
877 ignoredata = repo.dirstate._ignorefileandline(nf)
877 ignoredata = repo.dirstate._ignorefileandline(nf)
878 else:
878 else:
879 for p in util.finddirs(nf):
879 for p in util.finddirs(nf):
880 if ignore(p):
880 if ignore(p):
881 ignored = p
881 ignored = p
882 ignoredata = repo.dirstate._ignorefileandline(p)
882 ignoredata = repo.dirstate._ignorefileandline(p)
883 break
883 break
884 if ignored:
884 if ignored:
885 if ignored == nf:
885 if ignored == nf:
886 ui.write(_("%s is ignored\n") % m.uipath(f))
886 ui.write(_("%s is ignored\n") % m.uipath(f))
887 else:
887 else:
888 ui.write(_("%s is ignored because of "
888 ui.write(_("%s is ignored because of "
889 "containing folder %s\n")
889 "containing folder %s\n")
890 % (m.uipath(f), ignored))
890 % (m.uipath(f), ignored))
891 ignorefile, lineno, line = ignoredata
891 ignorefile, lineno, line = ignoredata
892 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
892 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
893 % (ignorefile, lineno, line))
893 % (ignorefile, lineno, line))
894 else:
894 else:
895 ui.write(_("%s is not ignored\n") % m.uipath(f))
895 ui.write(_("%s is not ignored\n") % m.uipath(f))
896
896
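# Illustration (added comment; the file name, rule and line number are
# hypothetical): with a matching glob rule in .hgignore, the branch above
# would print roughly
#
#   $ hg debugignore x.o
#   x.o is ignored
#   (ignore rule in .hgignore, line 2: '*.o')
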
@command('debugindex', cmdutil.debugrevlogopts +
    [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
    _('[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True)
def debugindex(ui, repo, file_=None, **opts):
    """dump the contents of an index file"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise error.Abort(_("unknown format %d") % format)

    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    if generaldelta:
        basehdr = ' delta'
    else:
        basehdr = ' base'

    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    if format == 0:
        ui.write((" rev offset length " + basehdr + " linkrev"
                  " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
    elif format == 1:
        ui.write((" rev flag offset length"
                  " size " + basehdr + " link p1 p2"
                  " %s\n") % "nodeid".rjust(idlen))

    for i in r:
        node = r.node(i)
        if generaldelta:
            base = r.deltaparent(i)
        else:
            base = r.chainbase(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                pp = [nullid, nullid]
            ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                i, r.start(i), r.length(i), base, r.linkrev(i),
                shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
                i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                base, r.linkrev(i), pr[0], pr[1], shortfn(node)))

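# Note (added for readability): in format 0 each printed row contains the
# revision number, its byte offset and compressed length in the revlog, the
# delta parent (on generaldelta revlogs) or chain base (otherwise), the
# linkrev, and the node ids of the revision and its two parents; format 1
# additionally shows the revision flags and the uncompressed size, and uses
# parent revision numbers instead of node ids.
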
@command('debugindexdot', cmdutil.debugrevlogopts,
    _('-c|-m|FILE'), optionalrepo=True)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
    ui.write(("digraph G {\n"))
    for i in r:
        node = r.node(i)
        pp = r.parents(node)
        ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
        if pp[1] != nullid:
            ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
    ui.write("}\n")

@command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
def debuginstall(ui, **opts):
    '''test Mercurial installation

    Returns 0 on success.
    '''
    opts = pycompat.byteskwargs(opts)

    def writetemp(contents):
        (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, pycompat.sysstr("wb"))
        f.write(contents)
        f.close()
        return name

    problems = 0

    fm = ui.formatter('debuginstall', opts)
    fm.startitem()

    # encoding
    fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
    err = None
    try:
        encoding.fromlocal("test")
    except error.Abort as inst:
        err = inst
        problems += 1
    fm.condwrite(err, 'encodingerror', _(" %s\n"
                 " (check that your locale is properly set)\n"), err)

    # Python
    fm.write('pythonexe', _("checking Python executable (%s)\n"),
             pycompat.sysexecutable)
    fm.write('pythonver', _("checking Python version (%s)\n"),
             ("%d.%d.%d" % sys.version_info[:3]))
    fm.write('pythonlib', _("checking Python lib (%s)...\n"),
             os.path.dirname(pycompat.fsencode(os.__file__)))

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add('sni')

    fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
             fm.formatlist(sorted(security), name='protocol',
                           fmt='%s', sep=','))

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if 'tls1.2' not in security:
        fm.plain(_(' TLS 1.2 not supported by Python install; '
                   'network connections lack modern security\n'))
    if 'sni' not in security:
        fm.plain(_(' SNI not supported by Python install; may have '
                   'connectivity issues with some servers\n'))

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write('hgver', _("checking Mercurial version (%s)\n"),
             hgver.split('+')[0])
    fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
             '+'.join(hgver.split('+')[1:]))

    # compiled modules
    fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
             policy.policy)
    fm.write('hgmodules', _("checking installed modules (%s)...\n"),
             os.path.dirname(pycompat.fsencode(__file__)))

    if policy.policy in ('c', 'allow'):
        err = None
        try:
            from .cext import (
                base85,
                bdiff,
                mpatch,
                osutil,
            )
            dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
        except Exception as inst:
            err = inst
            problems += 1
        fm.condwrite(err, 'extensionserror', " %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write('compengines', _('checking registered compression engines (%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines),
                           name='compengine', fmt='%s', sep=', '))
    fm.write('compenginesavail', _('checking available compression engines '
                                   '(%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines
                                  if e.available()),
                           name='compengine', fmt='%s', sep=', '))
    wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
    fm.write('compenginesserver', _('checking available compression engines '
                                    'for wire protocol (%s)\n'),
             fm.formatlist([e.name() for e in wirecompengines
                            if e.wireprotosupport()],
                           name='compengine', fmt='%s', sep=', '))

    # templates
    p = templater.templatepaths()
    fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
    fm.condwrite(not p, '', _(" no template directories found\n"))
    if p:
        m = templater.templatepath("map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = inst
                p = None
            fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
        else:
            p = None
        fm.condwrite(p, 'defaulttemplate',
                     _("checking default template (%s)\n"), m)
        fm.condwrite(not m, 'defaulttemplatenotfound',
                     _(" template '%s' not found\n"), "default")
    if not p:
        problems += 1
    fm.condwrite(not p, '',
                 _(" (templates seem to have been installed incorrectly)\n"))

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    fm.write('editor', _("checking commit editor... (%s)\n"), editor)
    cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
    fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
                 _(" No commit editor set and can't find %s in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor == 'vi' and editor)
    fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
                 _(" Can't find editor '%s' in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor)
    if not cmdpath and editor != 'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e
        problems += 1

    fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
    fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
                 " (specify a username in your configuration file)\n"), err)

    fm.condwrite(not problems, '',
                 _("no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(problems, 'problems',
                 _("%d problems detected,"
                   " please check your install!\n"), problems)
    fm.end()

    return problems

@command('debugknown', [], _('REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('known'):
        raise error.Abort("known() not supported by target repository")
    flags = repo.known([bin(s) for s in ids])
    ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))

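# Illustration (added comment; the URL and node ids are hypothetical): asking a
# peer about two full-length node ids prints one digit per id,
#
#   $ hg debugknown http://example.com/repo <40-hex-node-1> <40-hex-node-2>
#   10
#
# meaning the first node is known to the peer and the second is not.
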
@command('debuglabelcomplete', [], _('LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    debugnamecomplete(ui, repo, *args)

@command('debuglocks',
         [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
          ('W', 'force-wlock', None,
           _('free the working state lock (DANGEROUS)'))],
         _('[OPTION]...'))
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so they should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Returns 0 if no locks are held.

    """

    if opts.get(r'force_lock'):
        repo.svfs.unlink('lock')
    if opts.get(r'force_wlock'):
        repo.vfs.unlink('wlock')
    if opts.get(r'force_lock') or opts.get(r'force_wlock'):
        return 0

    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            l.release()
        else:
            try:
                stat = vfs.lstat(name)
                age = now - stat.st_mtime
                user = util.username(stat.st_uid)
                locker = vfs.readlock(name)
                if ":" in locker:
                    host, pid = locker.split(':')
                    if host == socket.gethostname():
                        locker = 'user %s, process %s' % (user, pid)
                    else:
                        locker = 'user %s, process %s, host %s' \
                                 % (user, pid, host)
                ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
                return 1
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise

        ui.write(("%-6s free\n") % (name + ":"))
        return 0

    held += report(repo.svfs, "lock", repo.lock)
    held += report(repo.vfs, "wlock", repo.wlock)

    return held

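# Illustration (added comment; the user name, pid and age are made up): when
# the store lock is held by a local process and the working directory lock is
# free, the report() calls above print something like
#
#   lock:  user alice, process 12345 (1024s)
#   wlock: free
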
@command('debugmergestate', [], '')
def debugmergestate(ui, repo, *args):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""
    def _hashornull(h):
        if h == nullhex:
            return 'null'
        else:
            return h

    def printrecords(version):
        ui.write(('* version %s records\n') % version)
        if version == 1:
            records = v1records
        else:
            records = v2records

        for rtype, record in records:
            # pretty print some record types
            if rtype == 'L':
                ui.write(('local: %s\n') % record)
            elif rtype == 'O':
                ui.write(('other: %s\n') % record)
            elif rtype == 'm':
                driver, mdstate = record.split('\0', 1)
                ui.write(('merge driver: %s (state "%s")\n')
                         % (driver, mdstate))
            elif rtype in 'FDC':
                r = record.split('\0')
                f, state, hash, lfile, afile, anode, ofile = r[0:7]
                if version == 1:
                    onode = 'not stored in v1 format'
                    flags = r[7]
                else:
                    onode, flags = r[7:9]
                ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
                         % (f, rtype, state, _hashornull(hash)))
                ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
                ui.write((' ancestor path: %s (node %s)\n')
                         % (afile, _hashornull(anode)))
                ui.write((' other path: %s (node %s)\n')
                         % (ofile, _hashornull(onode)))
            elif rtype == 'f':
                filename, rawextras = record.split('\0', 1)
                extras = rawextras.split('\0')
                i = 0
                extrastrings = []
                while i < len(extras):
                    extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
                    i += 2

                ui.write(('file extras: %s (%s)\n')
                         % (filename, ', '.join(extrastrings)))
            elif rtype == 'l':
                labels = record.split('\0', 2)
                labels = [l for l in labels if len(l) > 0]
                ui.write(('labels:\n'))
                ui.write((' local: %s\n' % labels[0]))
                ui.write((' other: %s\n' % labels[1]))
                if len(labels) > 2:
                    ui.write((' base: %s\n' % labels[2]))
            else:
                ui.write(('unrecognized entry: %s\t%s\n')
                         % (rtype, record.replace('\0', '\t')))

    # Avoid mergestate.read() since it may raise an exception for unsupported
    # merge state records. We shouldn't be doing this, but this is OK since this
    # command is pretty low-level.
    ms = mergemod.mergestate(repo)

    # sort so that reasonable information is on top
    v1records = ms._readrecordsv1()
    v2records = ms._readrecordsv2()
    order = 'LOml'
    def key(r):
        idx = order.find(r[0])
        if idx == -1:
            return (1, r[1])
        else:
            return (0, idx)
    v1records.sort(key=key)
    v2records.sort(key=key)

    if not v1records and not v2records:
        ui.write(('no merge state found\n'))
    elif not v2records:
        ui.note(('no version 2 merge state\n'))
        printrecords(1)
    elif ms._v1v2match(v1records, v2records):
        ui.note(('v1 and v2 states match: using v2\n'))
        printrecords(2)
    else:
        ui.note(('v1 and v2 states mismatch: using v1\n'))
        printrecords(1)
        if ui.verbose:
            printrecords(2)

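# Note (added for readability): the single-letter record types handled in
# printrecords() above come from the mergestate on-disk format: 'L' and 'O'
# hold the local and other changeset ids, 'm' carries the merge driver and its
# state, 'F'/'D'/'C' are per-file merge records, 'f' stores per-file extras,
# and 'l' stores the merge labels.
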
@command('debugnamecomplete', [], _('NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    names = set()
    # since we previously only listed open branches, we will handle that
    # specially (after this for loop)
    for name, ns in repo.names.iteritems():
        if name != 'branches':
            names.update(ns.listnames(repo))
    names.update(tag for (tag, heads, tip, closed)
                 in repo.branchmap().iterbranches() if not closed)
    completions = set()
    if not args:
        args = ['']
    for a in args:
        completions.update(n for n in names if n.startswith(a))
    ui.write('\n'.join(sorted(completions)))
    ui.write('\n')

@command('debugobsolete',
         [('', 'flags', 0, _('markers flag')),
          ('', 'record-parents', False,
           _('record parent information for the precursor')),
          ('r', 'rev', [], _('display markers relevant to REV')),
          ('', 'exclusive', False, _('restrict display to markers only '
                                     'relevant to REV')),
          ('', 'index', False, _('display index of the marker')),
          ('', 'delete', [], _('delete markers specified by indices')),
         ] + cmdutil.commitopts2 + cmdutil.formatteropts,
         _('[OBSOLETED [REPLACEMENT ...]]'))
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.Abort('changeset references must be full hexadecimal '
                              'node identifiers')

    if opts.get('delete'):
        indices = []
        for v in opts.get('delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.Abort(_('invalid index value: %r') % v,
                                  hint=_('use integers for indices'))

        if repo.currenttransaction():
            raise error.Abort(_('cannot delete obsmarkers in the middle '
                                'of transaction.'))

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_('deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        if opts['rev']:
            raise error.Abort('cannot select revision when creating marker')
        metadata = {}
        metadata['user'] = opts['user'] or ui.username()
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction('debugobsolete')
            try:
                date = opts.get('date')
                if date:
                    date = util.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts['record_parents']:
                    if prec not in repo.unfiltered():
                        raise error.Abort('cannot use --record-parents on '
                                          'unknown changesets')
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(tr, prec, succs, opts['flags'],
                                     parents=parents, date=date,
                                     metadata=metadata, ui=ui)
                tr.close()
            except ValueError as exc:
                raise error.Abort(_('bad obsmarker input: %s') % exc)
            finally:
                tr.release()
        finally:
            l.release()
    else:
        if opts['rev']:
            revs = scmutil.revrange(repo, opts['rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(obsutil.getmarkers(repo, nodes=nodes,
                                              exclusive=opts['exclusive']))
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get('rev') and opts.get('index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter('debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get('index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()

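# Illustration (added comment; the node ids are placeholders): recording that
# one changeset was rewritten into another creates a single marker,
#
#   hg debugobsolete <old-40-hex-node> <new-40-hex-node> -d '0 0'
#
# while running the command with no arguments lists the stored markers through
# cmdutil.showmarker().
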
@command('debugpathcomplete',
         [('f', 'full', None, _('complete an entire path')),
          ('n', 'normal', None, _('show only normal files')),
          ('a', 'added', None, _('show only added files')),
          ('r', 'removed', None, _('show only removed files'))],
         _('FILESPEC...'))
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += '/'
        spec = spec[len(rootdir):]
        fixpaths = pycompat.ossep != '/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, '/')
        speclen = len(spec)
        fullpaths = opts[r'full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.iteritems():
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace('/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    acceptable = ''
    if opts[r'normal']:
        acceptable += 'nm'
    if opts[r'added']:
        acceptable += 'a'
    if opts[r'removed']:
        acceptable += 'r'
    cwd = repo.getcwd()
    if not specs:
        specs = ['.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or 'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write('\n')

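# Illustration (added comment; the paths are hypothetical): with tracked files
# a/b.txt and a/c/d.txt, "hg debugpathcomplete a" stops at the next path
# segment and prints "a/b.txt" and "a/c", whereas adding --full would print
# both complete paths.
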
@command('debugpickmergetool',
         [('r', 'rev', '', _('check for files in this revision'), _('REV')),
          ('', 'changedelete', None, _('emulate merging change and delete')),
         ] + cmdutil.walkopts + cmdutil.mergetoolopts,
         _('[PATTERN]...'),
         inferrepo=True)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for the specified file

    As described in :hg:`help merge-tools`, Mercurial examines the
    configurations below in this order to decide which merge tool is
    chosen for the specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reasons only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out the examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actually updating to it.

    With --debug, this command also shows warning messages while matching
    against ``merge-patterns`` and so on. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases the amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows the configurations below
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If the merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such a case, the information above
    is useful for knowing why a merge tool was chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts['tool']:
        overrides[('ui', 'forcemerge')] = opts['tool']
        ui.note(('with --tool %r\n') % (opts['tool']))

    with ui.configoverride(overrides, 'debugmergepatterns'):
        hgmerge = encoding.environ.get("HGMERGE")
        if hgmerge is not None:
            ui.note(('with HGMERGE=%r\n') % (hgmerge))
        uimerge = ui.config("ui", "merge")
        if uimerge:
            ui.note(('with ui.merge=%r\n') % (uimerge))

        ctx = scmutil.revsingle(repo, opts.get('rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts['changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                if not ui.debugflag:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(repo, ui, path,
                                                     fctx.isbinary(),
                                                     'l' in fctx.flags(),
                                                     changedelete)
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(('%s = %s\n') % (path, tool))

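# Illustration (added comment; the file name and tool are examples, and the
# exact tool string depends on local configuration): the loop above prints one
# "FILE = MERGETOOL" line per examined file, e.g. roughly
#
#   $ hg debugpickmergetool file.txt
#   file.txt = :merge3
#
# when ui.merge is configured to :merge3.
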
@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    target = hg.peer(ui, {}, repopath)
    if keyinfo:
        key, old, new = keyinfo
        r = target.pushkey(namespace, key, old, new)
        ui.status(str(r) + '\n')
        return not r
    else:
        for k, v in sorted(target.listkeys(namespace).iteritems()):
            ui.write("%s\t%s\n" % (util.escapestr(k),
                                   util.escapestr(v)))

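# Illustration (added comment; the repository path and bookmark name are
# hypothetical): listing the "bookmarks" pushkey namespace prints one
# tab-separated key/value pair per bookmark,
#
#   $ hg debugpushkey /path/to/repo bookmarks
#   featurebook	<40-hex node>
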
@command('debugpvec', [], _('A B'))
def debugpvec(ui, repo, a, b=None):
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = "="
    elif pa > pb:
        rel = ">"
    elif pa < pb:
        rel = "<"
    elif pa | pb:
        rel = "|"
    ui.write(_("a: %s\n") % pa)
    ui.write(_("b: %s\n") % pb)
    ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
             (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
              pa.distance(pb), rel))

@command('debugrebuilddirstate|debugrebuildstate',
         [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
          ('', 'minimal', None, _('only rebuild files that are inconsistent with '
                                  'the working copy parent')),
         ],
         _('[-r REV]'))
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look for the given revision

    If no revision is specified, the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get(r'minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            manifestonly = manifestfiles - dirstatefiles
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)

@command('debugrebuildfncache', [], '')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    repair.rebuildfncache(ui, repo)

@command('debugrename',
         [('r', 'rev', '', _('revision to debug'), _('REV'))],
         _('[-r REV] FILE'))
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'))
    m = scmutil.match(ctx, (file1,) + pats, opts)
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        o = fctx.filelog().renamed(fctx.filenode())
        rel = m.rel(abs)
        if o:
            ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
        else:
            ui.write(_("%s not renamed\n") % rel)

@command('debugrevlog', cmdutil.debugrevlogopts +
    [('d', 'dump', False, _('dump index data'))],
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)

    if opts.get("dump"):
        numrevs = len(r)
        ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
                  " rawsize totalsize compression heads chainlen\n"))
        ts = 0
        heads = set()

        for rev in xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                     "%11d %5d %8d\n" %
                     (rev, p1, p2, r.start(rev), r.end(rev),
                      r.start(dbase), r.start(cbase),
                      r.start(p1), r.start(p2),
                      rs, ts, compression, len(heads), clen))
        return 0

    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append('inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append('generaldelta')
    if not flags:
        flags = ['(none)']

    nummerges = 0
    numfull = 0
    numprev = 0
    nump1 = 0
    nump2 = 0
    numother = 0
    nump1prev = 0
    nump2prev = 0
    chainlengths = []
    chainbases = []
    chainspans = []

    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        if l[0] is None or size < l[0]:
1781 l[0] = size
1781 l[0] = size
1782 if size > l[1]:
1782 if size > l[1]:
1783 l[1] = size
1783 l[1] = size
1784 l[2] += size
1784 l[2] += size
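
    # Illustrative note (not part of the original source): addsize keeps a
    # running (min, max, total) triple. Starting from l = [None, 0, 0],
    # addsize(5, l) followed by addsize(2, l) leaves l == [2, 5, 7].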

    numrevs = len(r)
    for rev in xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            numfull += 1
            addsize(size, fullsize)
        else:
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            addsize(size, deltasize)
            if delta == rev - 1:
                numprev += 1
                if delta == p1:
                    nump1prev += 1
                elif delta == p2:
                    nump2prev += 1
            elif delta == p1:
                nump1 += 1
            elif delta == p2:
                nump2 += 1
            elif delta != nullrev:
                numother += 1

        # Obtain data on the raw chunks in the revlog.
        segment = r._getsegmentforrevs(rev, rev)[1]
        if segment:
            chunktype = bytes(segment[0:1])
        else:
            chunktype = 'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    fullsize[2] /= numfull
    deltatotal = deltasize[2]
    if numrevs - numfull > 0:
        deltasize[2] /= numrevs - numfull
    totalsize = fulltotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    basedfmtstr = '%%%dd\n'
    basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        return basedfmtstr % len(str(max))
    def pcfmtstr(max, padding=0):
        return basepcfmtstr % (len(str(max)), ' ' * padding)

    def pcfmt(value, total):
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0
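
    # Illustrative note (not part of the original source): with a totalsize of,
    # say, 12345, dfmtstr(12345) expands to '%5d\n' and pcfmtstr(12345) to
    # '%5d (%5.2f%%)\n', so pcfmt(617, 12345) renders roughly as '  617 ( 5.00%)'.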

    ui.write(('format : %d\n') % format)
    ui.write(('flags : %s\n') % ', '.join(flags))

    ui.write('\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
    ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
    ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
    ui.write(('revision size : ') + fmt2 % totalsize)
    ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
    ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        if chunktype == 'empty':
            return ' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return ' 0x%s : ' % hex(chunktype)

    ui.write('\n')
    ui.write(('chunks : ') + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.write(('chunks size : ') + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write('\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.write(('avg chain length : ') + fmt % avgchainlen)
    ui.write(('max chain length : ') + fmt % maxchainlen)
    ui.write(('max chain reach : ') + fmt % maxchainspan)
    ui.write(('compression ratio : ') + fmt % compratio)

    if format > 0:
        ui.write('\n')
        ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
                 % tuple(datasize))
        ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
                 % tuple(fullsize))
        ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
                 % tuple(deltasize))

    if numdeltas > 0:
        ui.write('\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
        if numprev > 0:
            ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
                                                            numprev))
            ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
                                                            numprev))
            ui.write((' other : ') + fmt2 % pcfmt(numoprev,
                                                  numprev))
        if gdelta:
            ui.write(('deltas against p1 : ')
                     + fmt % pcfmt(nump1, numdeltas))
            ui.write(('deltas against p2 : ')
                     + fmt % pcfmt(nump2, numdeltas))
            ui.write(('deltas against other : ') + fmt % pcfmt(numother,
                                                               numdeltas))
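
# Illustrative usage note (not part of the original source): `hg debugrevlog -m`
# prints these statistics for the manifest revlog and `hg debugrevlog FILE` for a
# tracked file's filelog; adding -d/--dump emits the per-revision table produced
# by the dump branch above instead of the summary.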

@command('debugrevspec',
    [('', 'optimize', None,
      _('print parsed tree after optimizing (DEPRECATED)')),
     ('', 'show-revs', True, _('print list of result revisions (default)')),
     ('s', 'show-set', None, _('print internal representation of result set')),
     ('p', 'show-stage', [],
      _('print parsed tree at the given stage'), _('NAME')),
     ('', 'no-optimized', False, _('evaluate tree without optimization')),
     ('', 'verify-optimized', False, _('verify optimized result')),
     ],
    ('REVSPEC'))
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use the -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print the tree at every stage.

    Use the --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree, respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems('revsetalias')
    stages = [
        ('parsed', lambda tree: tree),
        ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
                                                           ui.warn)),
        ('concatenated', revsetlang.foldconcat),
        ('analyzed', revsetlang.analyze),
        ('optimized', revsetlang.optimize),
    ]
    if opts['no_optimized']:
        stages = stages[:-1]
    if opts['verify_optimized'] and opts['no_optimized']:
        raise error.Abort(_('cannot use --verify-optimized with '
                            '--no-optimized'))
    stagenames = set(n for n, f in stages)

    showalways = set()
    showchanged = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
        showchanged.update(['expanded', 'concatenated'])
        if opts['optimize']:
            showalways.add('optimized')
    if opts['show_stage'] and opts['optimize']:
        raise error.Abort(_('cannot use --optimize with --show-stage'))
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=repo.__contains__)
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(revsetlang.prettyformat(tree), "\n")
            printedtree = tree

    if opts['verify_optimized']:
        arevs = revset.makematcher(treebystage['analyzed'])(repo)
        brevs = revset.makematcher(treebystage['optimized'])(repo)
        if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
            ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
            ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        ui.write(('--- analyzed\n'), label='diff.file_a')
        ui.write(('+++ optimized\n'), label='diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write('-%s\n' % c, label='diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write('+%s\n' % c, label='diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(' %s\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
        ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
    if not opts['show_revs']:
        return
    for c in revs:
        ui.write("%s\n" % c)
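
# Illustrative usage note (not part of the original source; the revset is
# arbitrary): `hg debugrevspec -p all -s 'heads(branch(default))'` prints the
# tree after each parsing stage, the internal smartset representation, and the
# resulting revision numbers.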

@command('debugsetparents', [], _('REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """

    r1 = scmutil.revsingle(repo, rev1).node()
    r2 = scmutil.revsingle(repo, rev2, 'null').node()

    with repo.wlock():
        repo.setparents(r1, r2)
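
# Illustrative usage note (not part of the original source; REV1/REV2 are
# placeholders from the synopsis): `hg debugsetparents REV1 REV2` records the
# two revisions as the working directory parents without touching any files,
# which is occasionally useful when writing conversion tools.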

@command('debugssl', [], '[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    '''test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    '''
    if pycompat.osname != 'nt':
        raise error.Abort(_('certificate chain building is only possible on '
                            'Windows'))

    if not source:
        source = "default"
    elif not repo:
        raise error.Abort(_("there is no Mercurial repository here, and no "
                            "server specified"))

    source, branches = hg.parseurl(ui.expandpath(source))
    url = util.url(source)
    addr = None

    if url.scheme == 'https':
        addr = (url.host, url.port or 443)
    elif url.scheme == 'ssh':
        addr = (url.host, url.port or 22)
    else:
        raise error.Abort(_("only https and ssh connections are supported"))

    from . import win32

    s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
                        cert_reqs=ssl.CERT_NONE, ca_certs=None)

    try:
        s.connect(addr)
        cert = s.getpeercert(True)

        ui.status(_('checking the certificate chain for %s\n') % url.host)

        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_('certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_('failed.\n'))
            else:
                ui.status(_('done.\n'))
        else:
            ui.status(_('full certificate chain is available\n'))
    finally:
        s.close()
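
# Illustrative usage note (not part of the original source; the URL is
# hypothetical): with this change, `hg debugssl https://example.com/repo` can be
# run outside any repository, while a plain `hg debugssl` inside a clone still
# checks the server behind the 'default' path.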

@command('debugsub',
    [('r', 'rev', '',
      _('revision to check'), _('REV'))],
    _('[-r REV] [REV]'))
def debugsub(ui, repo, rev=None):
    ctx = scmutil.revsingle(repo, rev, None)
    for k, v in sorted(ctx.substate.items()):
        ui.write(('path %s\n') % k)
        ui.write((' source %s\n') % v[0])
        ui.write((' revision %s\n') % v[1])
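
# Illustrative output sketch (not part of the original source; the path and URL
# are hypothetical). For a revision with one subrepository the command prints:
#   path mysub
#    source https://example.com/mysub
#    revision <40-digit hex changeset id>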

@command('debugsuccessorssets',
    [('', 'closest', False, _('return closest successors sets only'))],
    _('[REV]'))
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only, unless the closest
    successors sets are requested (see --closest).

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors is called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    ctx2str = str
    node2str = short
    if ui.debug():
        def ctx2str(ctx):
            return ctx.hex()
        node2str = hex
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write('%s\n' % ctx2str(ctx))
        for succsset in obsutil.successorssets(repo, ctx.node(),
                                               closest=opts['closest'],
                                               cache=cache):
            if succsset:
                ui.write(' ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(' ')
                    ui.write(node2str(node))
            ui.write('\n')
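
# Illustrative usage note (not part of the original source; assumes changesets
# have been rewritten with obsolescence markers enabled): running
# `hg debugsuccessorssets 'obsolete()'` lists each obsolete revision followed by
# its successors sets, one set per line as described in the docstring.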

@command('debugtemplate',
    [('r', 'rev', [], _('apply template on changesets'), _('REV')),
     ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
    _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts[r'rev']:
        if repo is None:
            raise error.RepoError(_('there is no Mercurial repository here '
                                    '(.hg not found)'))
        revs = scmutil.revrange(repo, opts[r'rev'])

    props = {}
    for d in opts[r'define']:
        try:
            k, v = (e.strip() for e in d.split('=', 1))
            if not k or k == 'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_('malformed keyword definition: %s') % d)

    if ui.verbose:
        aliases = ui.configitems('templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), '\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')

    if revs is None:
        t = formatter.maketemplater(ui, tmpl)
        props['ui'] = ui
        ui.write(t.render(props))
    else:
        displayer = cmdutil.makelogtemplater(ui, repo, tmpl)
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
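
# Illustrative usage note (not part of the original source; the template strings
# are arbitrary): `hg debugtemplate -D word=hello '{word}\n'` renders a generic
# template against the -D keywords, while `hg debugtemplate -r . '{rev}:{node|short}\n'`
# applies a log template to the working directory parent.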

@command('debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    with repo.wlock(), repo.lock():
        repo.updatecaches()

@command('debugupgraderepo', [
    ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
    ('', 'run', False, _('performs an upgrade')),
])
def debugupgraderepo(ui, repo, run=False, optimize=None):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.
    """
    return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
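
# Illustrative usage note (not part of the original source): a bare
# `hg debugupgraderepo` only reports what an upgrade would change; adding
# --run (optionally with -o NAME for the extra optimizations listed in that
# report) actually performs it under the repository locks described above.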

@command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
         inferrepo=True)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    ui.write(('matcher: %r\n' % m))
    items = list(repo[None].walk(m))
    if not items:
        return
    f = lambda fn: fn
    if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
        f = lambda fn: util.normpath(fn)
    fmt = 'f %%-%ds %%-%ds %%s' % (
        max([len(abs) for abs in items]),
        max([len(m.rel(abs)) for abs in items]))
    for abs in items:
        line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
        ui.write("%s\n" % line.rstrip())
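
# Illustrative usage note (not part of the original source; the pattern is
# arbitrary): `hg debugwalk 'glob:**.py'` prints the matcher repr followed by one
# "f <absolute-path> <relative-path> [exact]" line per file the pattern selects,
# with "exact" appearing only for explicitly listed files.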

@command('debugwireargs',
    [('', 'three', '', 'three'),
     ('', 'four', '', 'four'),
     ('', 'five', '', 'five'),
     ] + cmdutil.remoteopts,
    _('REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    for opt in cmdutil.remoteopts:
        del opts[opt[1]]
    args = {}
    for k, v in opts.iteritems():
        if v:
            args[k] = v
    # run twice to check that we don't mess up the stream for the next command
    res1 = repo.debugwireargs(*vals, **args)
    res2 = repo.debugwireargs(*vals, **args)
    ui.write("%s\n" % res1)
    if res1 != res2:
        ui.warn("%s\n" % res2)
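
# Illustrative note (not part of the original source): this command appears to
# exist mainly for wire protocol testing; given a peer location it echoes back
# the positional and keyword arguments it sent, and it calls the peer twice to
# confirm the second request is not corrupted by the first, as the in-line
# comment above explains.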