debugrevlog: include information about intermediate snapshots...
Boris Feld
r39187:07b82f55 default
@@ -1,3288 +1,3299 b''
# debugcommands.py - command processing for debug* commands
#
# Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import codecs
import collections
import difflib
import errno
import operator
import os
import random
import re
import socket
import ssl
import stat
import string
import subprocess
import sys
import time

from .i18n import _
from .node import (
    bin,
    hex,
    nullhex,
    nullid,
    nullrev,
    short,
)
from .thirdparty import (
    cbor,
)
from . import (
    bundle2,
    changegroup,
    cmdutil,
    color,
    context,
    dagparser,
    dagutil,
    encoding,
    error,
    exchange,
    extensions,
    filemerge,
    filesetlang,
    formatter,
    hg,
    httppeer,
    localrepo,
    lock as lockmod,
    logcmdutil,
    merge as mergemod,
    obsolete,
    obsutil,
    phases,
    policy,
    pvec,
    pycompat,
    registrar,
    repair,
    revlog,
    revset,
    revsetlang,
    scmutil,
    setdiscovery,
    simplemerge,
    sshpeer,
    sslutil,
    streamclone,
    templater,
    treediscovery,
    upgrade,
    url as urlmod,
    util,
    vfs as vfsmod,
    wireprotoframing,
    wireprotoserver,
    wireprotov2peer,
)
from .utils import (
    dateutil,
    procutil,
    stringutil,
)

release = lockmod.release

command = registrar.command()

@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    if len(args) == 3:
        index, rev1, rev2 = args
        r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
        lookup = r.lookup
    elif len(args) == 2:
        if not repo:
            raise error.Abort(_('there is no Mercurial repository here '
                                '(.hg not found)'))
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_('either two or three arguments required'))
    a = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write('%d:%s\n' % (r.rev(a), hex(a)))

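# Illustrative usage sketch (an editor's addition, not part of the original
# module): inside a repository, "hg debugancestor REV1 REV2" resolves both
# revisions in the changelog and prints their common ancestor as
# "<rev>:<hex node>"; passing an explicit index first, for example
#
#   hg debugancestor .hg/store/00manifest.i 0 1
#
# runs the same computation against that revlog instead of the changelog.
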
@command('debugapplystreamclonebundle', [], 'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    f = hg.openpath(ui, fname)
    gen = exchange.readbundle(ui, f, fname)
    gen.apply(repo)

@command('debugbuilddag',
    [('m', 'mergeable-file', None, _('add single file mergeable changes')),
    ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
    ('n', 'new-file', None, _('add new file at each rev'))],
    _('[OPTION]... [TEXT]'))
def debugbuilddag(ui, repo, text=None,
                  mergeable_file=False,
                  overwritten_file=False,
                  new_file=False):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_("reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_('repository is not empty'))

    # determine number of revs in DAG
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == 'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = ['%d' % i
                              for i in pycompat.xrange(0, total * linesperrev)]
        initialmergedlines.append("")

    tags = []
    progress = ui.makeprogress(_('building'), unit=_('revisions'),
                               total=total)
    with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
        at = -1
        atbranch = 'default'
        nodeids = []
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == 'n':
                ui.note(('node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = "mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [x[fn].data() for x in (pa, p1,
                                                                     p2)]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append("")
                    elif at > 0:
                        ml = p1[fn].data().split("\n")
                    else:
                        ml = initialmergedlines
                    ml[id * linesperrev] += " r%i" % id
                    mergedtext = "\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = "of"
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id

                if new_file:
                    fn = "nf%i" % id
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith("nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(repo, cx, path,
                                                  filecontent[path])
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
                                    date=(id, 0),
                                    user="debugbuilddag",
                                    extra={'branch': atbranch})
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == 'l':
                id, name = data
                ui.note(('tag %s\n' % name))
                tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == 'a':
                ui.note(('branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write("localtags", "".join(tags))

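# Illustrative sketch (an editor's addition, not part of the original module):
# following the element grammar documented in the docstring above, a text such
# as
#
#   hg debugbuilddag '+2:base +3 <base +2'
#
# run in an empty repository creates two linear revisions (tagging the second
# one "base"), stacks three more on top of it, then resets the default parent
# to the "base" tag and adds two further revisions, leaving two heads.
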
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    indent_string = ' ' * indent
    if all:
        ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
                 % indent_string)

        def showchunks(named):
            ui.write("\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write("%s%s %s %s %s %s %d\n" %
                         (indent_string, hex(node), hex(p1), hex(p2),
                          hex(cs), hex(deltabase), len(delta)))

        chunkdata = gen.changelogheader()
        showchunks("changelog")
        chunkdata = gen.manifestheader()
        showchunks("manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata['filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_('use debugbundle2 for this file'))
        chunkdata = gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write("%s%s\n" % (indent_string, hex(node)))

def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = ' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        msg = "%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = "%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        fm = ui.formatter('debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()

def _debugphaseheads(ui, data, indent=0):
    """display version and markers contained in 'data'"""
    indent_string = ' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(indent_string)
            ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))

def _quasirepr(thing):
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        return '{%s}' % (
            b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
    return pycompat.bytestr(repr(thing))

def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_('not a bundle2 file'))
    ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
    parttypes = opts.get(r'part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = '%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        if part.type == 'changegroup':
            version = part.params.get('version', '01')
            cg = changegroup.getunbundler(version, part, 'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == 'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == 'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)

@command('debugbundle',
        [('a', 'all', None, _('show all details')),
         ('', 'part-type', [], _('show only the named part type')),
         ('', 'spec', None, _('print the bundlespec of the bundle'))],
        _('FILE'),
        norepo=True)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            spec = exchange.getbundlespec(ui, f)
            ui.write('%s\n' % spec)
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)

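# Illustrative usage sketch (an editor's addition, not part of the original
# module):
#
#   hg debugbundle bundle.hg          # one hex node per changeset delta
#   hg debugbundle --all bundle.hg    # id, p1, p2, cset, delta base, len(delta)
#   hg debugbundle --spec bundle.hg   # print only the bundlespec and exit
#
# For bundle2 files each part is listed; --part-type restricts the listing to
# the named part types, as handled by _debugbundle2() above.
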
@command('debugcapabilities',
        [], _('PATH'),
        norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    caps = peer.capabilities()
    ui.write(('Main capabilities:\n'))
    for c in sorted(caps):
        ui.write(('  %s\n') % c)
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.write(('Bundle2 capabilities:\n'))
        for key, values in sorted(b2caps.iteritems()):
            ui.write(('  %s\n') % key)
            for v in values:
                ui.write(('    %s\n') % v)

@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s\n") %
                    (f, state))
            errors += 1
    if errors:
        # use a distinct name so the 'error' module is not shadowed here
        errstr = _(".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)

@command('debugcolor',
        [('', 'style', None, _('show all configured styles'))],
        'hg debugcolor')
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
    if opts.get(r'style'):
        return _debugdisplaystyle(ui)
    else:
        return _debugdisplaycolor(ui)

def _debugdisplaycolor(ui):
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for k, v in ui.configitems('color'):
            if k.startswith('color.'):
                ui._styles[k] = k[6:]
            elif k.startswith('terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_('available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(),
                   key=lambda i: ('_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(('%s\n') % colorname, label=label)

def _debugdisplaystyle(ui):
    ui.write(_('available style:\n'))
    if not ui._styles:
        return
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write('%s' % label, label=label)
        if effects:
            # 50
            ui.write(': ')
            ui.write(' ' * (max(0, width - len(label))))
            ui.write(', '.join(ui.label(e, e) for e in effects.split()))
        ui.write('\n')

@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(_('(warning: stream clone bundle will contain secret '
                  'revisions)\n'))

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))

@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
    ('b', 'branches', None, _('annotate with branch names')),
    ('', 'dots', None, _('use dots for runs')),
    ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get(r'spaces')
    dots = opts.get(r'dots')
    if file_:
        rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                             file_)
        revs = set((int(r) for r in revs))
        def events():
            for r in rlog:
                yield 'n', (r, list(p for p in rlog.parentrevs(r)
                                    if p != -1))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        cl = repo.changelog
        tags = opts.get(r'tags')
        branches = opts.get(r'branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            b = "default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(p for p in cl.parentrevs(r)
                                    if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
        ui.write("\n")

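# Illustrative usage sketch (an editor's addition, not part of the original
# module):
#
#   hg debugdag -t -b                   # changelog DAG with tag/branch labels
#   hg debugdag .hg/store/00manifest.i  # DAG of an arbitrary revlog index
#
# The output is the concise dagparser text format, essentially the same
# notation that 'hg debugbuilddag' consumes.
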
@command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
    try:
        ui.write(r.revision(r.lookup(rev), raw=True))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)

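# Illustrative usage sketch (an editor's addition, not part of the original
# module):
#
#   hg debugdata -c 0              # raw changelog entry for revision 0
#   hg debugdata -m 0              # raw manifest entry for revision 0
#   hg debugdata some/file.txt 2   # raw filelog data for that file revision
#
# The revision is emitted with raw=True, i.e. without flag processing applied.
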
@command('debugdate',
    [('e', 'extended', None, _('try extended date formats'))],
    _('[-e] DATE [RANGE]'),
    norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts[r"extended"]:
        d = dateutil.parsedate(date, util.extendeddateformats)
    else:
        d = dateutil.parsedate(date)
    ui.write(("internal: %d %d\n") % d)
    ui.write(("standard: %s\n") % dateutil.datestr(d))
    if range:
        m = dateutil.matchdate(range)
        ui.write(("match: %s\n") % m(d[0]))

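# Illustrative usage sketch (an editor's addition, not part of the original
# module): "hg debugdate '2018-08-01 12:00 +0200'" prints the parsed value
# both as Mercurial's internal "<unixtime> <tzoffset>" pair and as a standard
# date string; with a RANGE argument it also reports whether the date matches
# that range, and -e/--extended enables the extended date formats.
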
@command('debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``:       revision number
    :``chainid``:   delta chain identifier (numbered by unique base)
    :``chainlen``:  delta chain length to this revision
    :``prevrev``:   previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``:  compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                    (new delta chains typically start at ratio 2.00)
    :``lindist``:   linear distance from base revision in delta chain to end
                    of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                    how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``:     total size of data read from the disk for a revision
                       (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``:  density of useful bytes in the data read from the disk
    :``srchunks``:     in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            if e[3] == e[5]:
                deltatype = 'p1'
            elif e[3] == e[6]:
                deltatype = 'p2'
            elif e[3] == rev - 1:
                deltatype = 'prev'
            elif e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            if e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    fm.plain('    rev  chain# chainlen     prev   delta       '
             'size    rawsize  chainsize     ratio   lindist extradist '
             'extraratio')
    if withsparseread:
        fm.plain('   readsize largestblk rddensity srchunks')
    fm.plain('\n')

    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in revlog._slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write('readsize largestblock readdensity srchunks',
                     ' %10d %10d %9.5f %8d',
                     readsize, largestblock, readdensity, srchunks,
                     readsize=readsize, largestblock=largestblock,
                     readdensity=readdensity, srchunks=srchunks)

        fm.plain('\n')

    fm.end()

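# Illustrative usage sketch (an editor's addition, not part of the original
# module): the output goes through the generic formatter, so the keywords
# documented in the docstring can be templated, e.g.
#
#   hg debugdeltachain -m -T '{rev} {chainid} {chainlen} {deltatype}\n'
#
# lists, for every manifest revision, its delta chain id, chain length and how
# its delta was computed.
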
@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime')),
    ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = opts.get(r'nodates')
    datesort = opts.get(r'datesort')

    timestr = ""
    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        if ent[3] == -1:
            timestr = 'unset               '
        elif nodates:
            timestr = 'set                 '
        else:
            timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
                                    time.localtime(ent[3]))
            timestr = encoding.strtolocal(timestr)
        if ent[1] & 0o20000:
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))

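# Illustrative usage sketch (an editor's addition, not part of the original
# module):
#
#   hg debugdirstate --nodates    # stable output, mtimes shown as "set"
#   hg debugdirstate --datesort   # sort by saved mtime instead of filename
#
# Each line is "<state> <mode> <size> <mtime> <filename>", followed by any
# recorded copy sources.
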
@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
    ('', 'nonheads', None,
     _('use old-style discovery with non-heads included')),
    ('', 'rev', [], 'restrict discovery to this set of revs'),
    ] + cmdutil.remoteopts,
    _('[--rev REV] [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(pushedrevs, remoteheads, remote=remote):
        if opts.get('old'):
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))
            dag = dagutil.revlogdag(repo.changelog)
            all = dag.ancestorset(dag.internalizeall(common))
            common = dag.externalizeall(dag.headsetofconnecteds(all))
        else:
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
                                                            ancestorsof=nodes)
        common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
        if lheads <= common:
            ui.write(("local is subset\n"))
        elif rheads <= common:
            ui.write(("remote is subset\n"))

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts['rev']
    doit(localrevs, remoterevs)

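# Illustrative usage sketch (an editor's addition, not part of the original
# module):
#
#   hg debugdiscovery                        # against the 'default' path
#   hg debugdiscovery --rev . other-remote   # restrict to ancestors of '.'
#   hg debugdiscovery --old other-remote     # force legacy tree discovery
#
# The command prints the common heads that were found and notes whether the
# local or remote head set is a subset of them.
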
_chunksize = 4 << 10

@command('debugdownload',
    [
        ('o', 'output', '', _('path')),
    ],
    optionalrepo=True)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config
    """
    fh = urlmod.open(ui, url, output)

    dest = ui
    if output:
        dest = open(output, "wb", _chunksize)
    try:
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        if output:
            dest.close()

@command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter('debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = pycompat.fsencode(extmod.__file__)
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', '').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write('name', '%s\n', extname)
        else:
            fm.write('name', '%s', extname)
        if isinternal or hgver in exttestedwith:
            fm.plain('\n')
        elif not exttestedwith:
            fm.plain(_(' (untested!)\n'))
        else:
            lasttestedversion = exttestedwith[-1]
            fm.plain(' (%s!)\n' % lasttestedversion)

        fm.condwrite(ui.verbose and extsource, 'source',
                     _('  location: %s\n'), extsource or "")

        if ui.verbose:
            fm.plain(_('  bundled: %s\n') % ['no', 'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
                     _('  tested with: %s\n'),
                     fm.formatlist(exttestedwith, name='ver'))

        fm.condwrite(ui.verbose and extbuglink, 'buglink',
                     _('  bug reporting: %s\n'), extbuglink or "")

    fm.end()

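# Illustrative usage sketch (an editor's addition, not part of the original
# module):
#
#   hg debugextensions         # one line per extension, flagging untested ones
#   hg debugextensions -v      # add location, bundled status, tested-with
#                              # versions and bug reporting link
#   hg debugextensions -Tjson  # machine-readable output via the formatter
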
888 @command('debugfileset',
888 @command('debugfileset',
889 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
889 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
890 ('', 'all-files', False,
890 ('', 'all-files', False,
891 _('test files from all revisions and working directory')),
891 _('test files from all revisions and working directory')),
892 ('s', 'show-matcher', None,
892 ('s', 'show-matcher', None,
893 _('print internal representation of matcher')),
893 _('print internal representation of matcher')),
894 ('p', 'show-stage', [],
894 ('p', 'show-stage', [],
895 _('print parsed tree at the given stage'), _('NAME'))],
895 _('print parsed tree at the given stage'), _('NAME'))],
896 _('[-r REV] [--all-files] [OPTION]... FILESPEC'))
896 _('[-r REV] [--all-files] [OPTION]... FILESPEC'))
897 def debugfileset(ui, repo, expr, **opts):
897 def debugfileset(ui, repo, expr, **opts):
898 '''parse and apply a fileset specification'''
898 '''parse and apply a fileset specification'''
899 from . import fileset
899 from . import fileset
900 fileset.symbols # force import of fileset so we have predicates to optimize
900 fileset.symbols # force import of fileset so we have predicates to optimize
901 opts = pycompat.byteskwargs(opts)
901 opts = pycompat.byteskwargs(opts)
902 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
902 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
903
903
904 stages = [
904 stages = [
905 ('parsed', pycompat.identity),
905 ('parsed', pycompat.identity),
906 ('analyzed', filesetlang.analyze),
906 ('analyzed', filesetlang.analyze),
907 ('optimized', filesetlang.optimize),
907 ('optimized', filesetlang.optimize),
908 ]
908 ]
909 stagenames = set(n for n, f in stages)
909 stagenames = set(n for n, f in stages)
910
910
911 showalways = set()
911 showalways = set()
912 if ui.verbose and not opts['show_stage']:
912 if ui.verbose and not opts['show_stage']:
913 # show parsed tree by --verbose (deprecated)
913 # show parsed tree by --verbose (deprecated)
914 showalways.add('parsed')
914 showalways.add('parsed')
915 if opts['show_stage'] == ['all']:
915 if opts['show_stage'] == ['all']:
916 showalways.update(stagenames)
916 showalways.update(stagenames)
917 else:
917 else:
918 for n in opts['show_stage']:
918 for n in opts['show_stage']:
919 if n not in stagenames:
919 if n not in stagenames:
920 raise error.Abort(_('invalid stage name: %s') % n)
920 raise error.Abort(_('invalid stage name: %s') % n)
921 showalways.update(opts['show_stage'])
921 showalways.update(opts['show_stage'])
922
922
923 tree = filesetlang.parse(expr)
923 tree = filesetlang.parse(expr)
924 for n, f in stages:
924 for n, f in stages:
925 tree = f(tree)
925 tree = f(tree)
926 if n in showalways:
926 if n in showalways:
927 if opts['show_stage'] or n != 'parsed':
927 if opts['show_stage'] or n != 'parsed':
928 ui.write(("* %s:\n") % n)
928 ui.write(("* %s:\n") % n)
929 ui.write(filesetlang.prettyformat(tree), "\n")
929 ui.write(filesetlang.prettyformat(tree), "\n")
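# Rough sketch of what --show-stage all prints via the loop above (indicative
# only; the exact tree text comes from filesetlang.prettyformat() and depends
# on the fileset grammar):
#
#   * parsed:    the raw tree returned by filesetlang.parse(expr)
#   * analyzed:  the same tree after filesetlang.analyze()
#   * optimized: the tree after filesetlang.optimize()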
930
930
931 files = set()
931 files = set()
932 if opts['all_files']:
932 if opts['all_files']:
933 for r in repo:
933 for r in repo:
934 c = repo[r]
934 c = repo[r]
935 files.update(c.files())
935 files.update(c.files())
936 files.update(c.substate)
936 files.update(c.substate)
937 if opts['all_files'] or ctx.rev() is None:
937 if opts['all_files'] or ctx.rev() is None:
938 wctx = repo[None]
938 wctx = repo[None]
939 files.update(repo.dirstate.walk(scmutil.matchall(repo),
939 files.update(repo.dirstate.walk(scmutil.matchall(repo),
940 subrepos=list(wctx.substate),
940 subrepos=list(wctx.substate),
941 unknown=True, ignored=True))
941 unknown=True, ignored=True))
942 files.update(wctx.substate)
942 files.update(wctx.substate)
943 else:
943 else:
944 files.update(ctx.files())
944 files.update(ctx.files())
945 files.update(ctx.substate)
945 files.update(ctx.substate)
946
946
947 m = ctx.matchfileset(expr)
947 m = ctx.matchfileset(expr)
948 if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
948 if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
949 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
949 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
950 for f in sorted(files):
950 for f in sorted(files):
951 if not m(f):
951 if not m(f):
952 continue
952 continue
953 ui.write("%s\n" % f)
953 ui.write("%s\n" % f)
954
954
955 @command('debugformat',
955 @command('debugformat',
956 [] + cmdutil.formatteropts)
956 [] + cmdutil.formatteropts)
957 def debugformat(ui, repo, **opts):
957 def debugformat(ui, repo, **opts):
958 """display format information about the current repository
958 """display format information about the current repository
959
959
960 Use --verbose to get extra information about the current config value and
960 Use --verbose to get extra information about the current config value and
961 the Mercurial default."""
961 the Mercurial default."""
962 opts = pycompat.byteskwargs(opts)
962 opts = pycompat.byteskwargs(opts)
963 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
963 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
964 maxvariantlength = max(len('format-variant'), maxvariantlength)
964 maxvariantlength = max(len('format-variant'), maxvariantlength)
965
965
966 def makeformatname(name):
966 def makeformatname(name):
967 return '%s:' + (' ' * (maxvariantlength - len(name)))
967 return '%s:' + (' ' * (maxvariantlength - len(name)))
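# Illustration (hypothetical values): if maxvariantlength were 14 and name
# were 'compression' (11 characters), this would return '%s:' followed by
# three spaces, so every "<name>:" prefix written below has the same width
# and the repo/config/default columns line up.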
968
968
969 fm = ui.formatter('debugformat', opts)
969 fm = ui.formatter('debugformat', opts)
970 if fm.isplain():
970 if fm.isplain():
971 def formatvalue(value):
971 def formatvalue(value):
972 if util.safehasattr(value, 'startswith'):
972 if util.safehasattr(value, 'startswith'):
973 return value
973 return value
974 if value:
974 if value:
975 return 'yes'
975 return 'yes'
976 else:
976 else:
977 return 'no'
977 return 'no'
978 else:
978 else:
979 formatvalue = pycompat.identity
979 formatvalue = pycompat.identity
980
980
981 fm.plain('format-variant')
981 fm.plain('format-variant')
982 fm.plain(' ' * (maxvariantlength - len('format-variant')))
982 fm.plain(' ' * (maxvariantlength - len('format-variant')))
983 fm.plain(' repo')
983 fm.plain(' repo')
984 if ui.verbose:
984 if ui.verbose:
985 fm.plain(' config default')
985 fm.plain(' config default')
986 fm.plain('\n')
986 fm.plain('\n')
987 for fv in upgrade.allformatvariant:
987 for fv in upgrade.allformatvariant:
988 fm.startitem()
988 fm.startitem()
989 repovalue = fv.fromrepo(repo)
989 repovalue = fv.fromrepo(repo)
990 configvalue = fv.fromconfig(repo)
990 configvalue = fv.fromconfig(repo)
991
991
992 if repovalue != configvalue:
992 if repovalue != configvalue:
993 namelabel = 'formatvariant.name.mismatchconfig'
993 namelabel = 'formatvariant.name.mismatchconfig'
994 repolabel = 'formatvariant.repo.mismatchconfig'
994 repolabel = 'formatvariant.repo.mismatchconfig'
995 elif repovalue != fv.default:
995 elif repovalue != fv.default:
996 namelabel = 'formatvariant.name.mismatchdefault'
996 namelabel = 'formatvariant.name.mismatchdefault'
997 repolabel = 'formatvariant.repo.mismatchdefault'
997 repolabel = 'formatvariant.repo.mismatchdefault'
998 else:
998 else:
999 namelabel = 'formatvariant.name.uptodate'
999 namelabel = 'formatvariant.name.uptodate'
1000 repolabel = 'formatvariant.repo.uptodate'
1000 repolabel = 'formatvariant.repo.uptodate'
1001
1001
1002 fm.write('name', makeformatname(fv.name), fv.name,
1002 fm.write('name', makeformatname(fv.name), fv.name,
1003 label=namelabel)
1003 label=namelabel)
1004 fm.write('repo', ' %3s', formatvalue(repovalue),
1004 fm.write('repo', ' %3s', formatvalue(repovalue),
1005 label=repolabel)
1005 label=repolabel)
1006 if fv.default != configvalue:
1006 if fv.default != configvalue:
1007 configlabel = 'formatvariant.config.special'
1007 configlabel = 'formatvariant.config.special'
1008 else:
1008 else:
1009 configlabel = 'formatvariant.config.default'
1009 configlabel = 'formatvariant.config.default'
1010 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
1010 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
1011 label=configlabel)
1011 label=configlabel)
1012 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
1012 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
1013 label='formatvariant.default')
1013 label='formatvariant.default')
1014 fm.plain('\n')
1014 fm.plain('\n')
1015 fm.end()
1015 fm.end()
1016
1016
1017 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
1017 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
1018 def debugfsinfo(ui, path="."):
1018 def debugfsinfo(ui, path="."):
1019 """show information detected about current filesystem"""
1019 """show information detected about current filesystem"""
1020 ui.write(('path: %s\n') % path)
1020 ui.write(('path: %s\n') % path)
1021 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
1021 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
1022 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
1022 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
1023 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
1023 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
1024 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
1024 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
1025 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
1025 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
1026 casesensitive = '(unknown)'
1026 casesensitive = '(unknown)'
1027 try:
1027 try:
1028 with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
1028 with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
1029 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
1029 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
1030 except OSError:
1030 except OSError:
1031 pass
1031 pass
1032 ui.write(('case-sensitive: %s\n') % casesensitive)
1032 ui.write(('case-sensitive: %s\n') % casesensitive)
1033
1033
1034 @command('debuggetbundle',
1034 @command('debuggetbundle',
1035 [('H', 'head', [], _('id of head node'), _('ID')),
1035 [('H', 'head', [], _('id of head node'), _('ID')),
1036 ('C', 'common', [], _('id of common node'), _('ID')),
1036 ('C', 'common', [], _('id of common node'), _('ID')),
1037 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
1037 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
1038 _('REPO FILE [-H|-C ID]...'),
1038 _('REPO FILE [-H|-C ID]...'),
1039 norepo=True)
1039 norepo=True)
1040 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1040 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1041 """retrieves a bundle from a repo
1041 """retrieves a bundle from a repo
1042
1042
1043 Every ID must be a full-length hex node id string. Saves the bundle to the
1043 Every ID must be a full-length hex node id string. Saves the bundle to the
1044 given file.
1044 given file.
1045 """
1045 """
1046 opts = pycompat.byteskwargs(opts)
1046 opts = pycompat.byteskwargs(opts)
1047 repo = hg.peer(ui, opts, repopath)
1047 repo = hg.peer(ui, opts, repopath)
1048 if not repo.capable('getbundle'):
1048 if not repo.capable('getbundle'):
1049 raise error.Abort("getbundle() not supported by target repository")
1049 raise error.Abort("getbundle() not supported by target repository")
1050 args = {}
1050 args = {}
1051 if common:
1051 if common:
1052 args[r'common'] = [bin(s) for s in common]
1052 args[r'common'] = [bin(s) for s in common]
1053 if head:
1053 if head:
1054 args[r'heads'] = [bin(s) for s in head]
1054 args[r'heads'] = [bin(s) for s in head]
1055 # TODO: get desired bundlecaps from command line.
1055 # TODO: get desired bundlecaps from command line.
1056 args[r'bundlecaps'] = None
1056 args[r'bundlecaps'] = None
1057 bundle = repo.getbundle('debug', **args)
1057 bundle = repo.getbundle('debug', **args)
1058
1058
1059 bundletype = opts.get('type', 'bzip2').lower()
1059 bundletype = opts.get('type', 'bzip2').lower()
1060 btypes = {'none': 'HG10UN',
1060 btypes = {'none': 'HG10UN',
1061 'bzip2': 'HG10BZ',
1061 'bzip2': 'HG10BZ',
1062 'gzip': 'HG10GZ',
1062 'gzip': 'HG10GZ',
1063 'bundle2': 'HG20'}
1063 'bundle2': 'HG20'}
1064 bundletype = btypes.get(bundletype)
1064 bundletype = btypes.get(bundletype)
1065 if bundletype not in bundle2.bundletypes:
1065 if bundletype not in bundle2.bundletypes:
1066 raise error.Abort(_('unknown bundle type specified with --type'))
1066 raise error.Abort(_('unknown bundle type specified with --type'))
1067 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1067 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
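# Sketch of a typical invocation (illustrative; the URL is a placeholder and
# the node must be a full 40-character hex id, as the docstring says):
#
#   hg debuggetbundle http://example.com/repo out.hg -H <full-hex-node> -t gzip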
1068
1068
1069 @command('debugignore', [], '[FILE]')
1069 @command('debugignore', [], '[FILE]')
1070 def debugignore(ui, repo, *files, **opts):
1070 def debugignore(ui, repo, *files, **opts):
1071 """display the combined ignore pattern and information about ignored files
1071 """display the combined ignore pattern and information about ignored files
1072
1072
1073 With no argument display the combined ignore pattern.
1073 With no argument display the combined ignore pattern.
1074
1074
1075 Given space-separated file names, show whether each given file is ignored
1075 Given space-separated file names, show whether each given file is ignored
1076 and, if so, the ignore rule (file and line number) that matched it.
1076 and, if so, the ignore rule (file and line number) that matched it.
1077 """
1077 """
1078 ignore = repo.dirstate._ignore
1078 ignore = repo.dirstate._ignore
1079 if not files:
1079 if not files:
1080 # Show all the patterns
1080 # Show all the patterns
1081 ui.write("%s\n" % pycompat.byterepr(ignore))
1081 ui.write("%s\n" % pycompat.byterepr(ignore))
1082 else:
1082 else:
1083 m = scmutil.match(repo[None], pats=files)
1083 m = scmutil.match(repo[None], pats=files)
1084 for f in m.files():
1084 for f in m.files():
1085 nf = util.normpath(f)
1085 nf = util.normpath(f)
1086 ignored = None
1086 ignored = None
1087 ignoredata = None
1087 ignoredata = None
1088 if nf != '.':
1088 if nf != '.':
1089 if ignore(nf):
1089 if ignore(nf):
1090 ignored = nf
1090 ignored = nf
1091 ignoredata = repo.dirstate._ignorefileandline(nf)
1091 ignoredata = repo.dirstate._ignorefileandline(nf)
1092 else:
1092 else:
1093 for p in util.finddirs(nf):
1093 for p in util.finddirs(nf):
1094 if ignore(p):
1094 if ignore(p):
1095 ignored = p
1095 ignored = p
1096 ignoredata = repo.dirstate._ignorefileandline(p)
1096 ignoredata = repo.dirstate._ignorefileandline(p)
1097 break
1097 break
1098 if ignored:
1098 if ignored:
1099 if ignored == nf:
1099 if ignored == nf:
1100 ui.write(_("%s is ignored\n") % m.uipath(f))
1100 ui.write(_("%s is ignored\n") % m.uipath(f))
1101 else:
1101 else:
1102 ui.write(_("%s is ignored because of "
1102 ui.write(_("%s is ignored because of "
1103 "containing folder %s\n")
1103 "containing folder %s\n")
1104 % (m.uipath(f), ignored))
1104 % (m.uipath(f), ignored))
1105 ignorefile, lineno, line = ignoredata
1105 ignorefile, lineno, line = ignoredata
1106 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1106 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1107 % (ignorefile, lineno, line))
1107 % (ignorefile, lineno, line))
1108 else:
1108 else:
1109 ui.write(_("%s is not ignored\n") % m.uipath(f))
1109 ui.write(_("%s is not ignored\n") % m.uipath(f))
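# Sketch of typical use (illustrative path): "hg debugignore build/out.o"
# prints either "build/out.o is ignored" plus the matching ignore rule with
# its file and line number, or "build/out.o is not ignored".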
1110
1110
1111 @command('debugindex', cmdutil.debugrevlogopts +
1111 @command('debugindex', cmdutil.debugrevlogopts +
1112 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
1112 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
1113 _('[-f FORMAT] -c|-m|FILE'),
1113 _('[-f FORMAT] -c|-m|FILE'),
1114 optionalrepo=True)
1114 optionalrepo=True)
1115 def debugindex(ui, repo, file_=None, **opts):
1115 def debugindex(ui, repo, file_=None, **opts):
1116 """dump the contents of an index file"""
1116 """dump the contents of an index file"""
1117 opts = pycompat.byteskwargs(opts)
1117 opts = pycompat.byteskwargs(opts)
1118 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
1118 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
1119 format = opts.get('format', 0)
1119 format = opts.get('format', 0)
1120 if format not in (0, 1):
1120 if format not in (0, 1):
1121 raise error.Abort(_("unknown format %d") % format)
1121 raise error.Abort(_("unknown format %d") % format)
1122
1122
1123 if ui.debugflag:
1123 if ui.debugflag:
1124 shortfn = hex
1124 shortfn = hex
1125 else:
1125 else:
1126 shortfn = short
1126 shortfn = short
1127
1127
1128 # There might not be anything in r, so have a sane default
1128 # There might not be anything in r, so have a sane default
1129 idlen = 12
1129 idlen = 12
1130 for i in r:
1130 for i in r:
1131 idlen = len(shortfn(r.node(i)))
1131 idlen = len(shortfn(r.node(i)))
1132 break
1132 break
1133
1133
1134 if format == 0:
1134 if format == 0:
1135 if ui.verbose:
1135 if ui.verbose:
1136 ui.write((" rev offset length linkrev"
1136 ui.write((" rev offset length linkrev"
1137 " %s %s p2\n") % ("nodeid".ljust(idlen),
1137 " %s %s p2\n") % ("nodeid".ljust(idlen),
1138 "p1".ljust(idlen)))
1138 "p1".ljust(idlen)))
1139 else:
1139 else:
1140 ui.write((" rev linkrev %s %s p2\n") % (
1140 ui.write((" rev linkrev %s %s p2\n") % (
1141 "nodeid".ljust(idlen), "p1".ljust(idlen)))
1141 "nodeid".ljust(idlen), "p1".ljust(idlen)))
1142 elif format == 1:
1142 elif format == 1:
1143 if ui.verbose:
1143 if ui.verbose:
1144 ui.write((" rev flag offset length size link p1"
1144 ui.write((" rev flag offset length size link p1"
1145 " p2 %s\n") % "nodeid".rjust(idlen))
1145 " p2 %s\n") % "nodeid".rjust(idlen))
1146 else:
1146 else:
1147 ui.write((" rev flag size link p1 p2 %s\n") %
1147 ui.write((" rev flag size link p1 p2 %s\n") %
1148 "nodeid".rjust(idlen))
1148 "nodeid".rjust(idlen))
1149
1149
1150 for i in r:
1150 for i in r:
1151 node = r.node(i)
1151 node = r.node(i)
1152 if format == 0:
1152 if format == 0:
1153 try:
1153 try:
1154 pp = r.parents(node)
1154 pp = r.parents(node)
1155 except Exception:
1155 except Exception:
1156 pp = [nullid, nullid]
1156 pp = [nullid, nullid]
1157 if ui.verbose:
1157 if ui.verbose:
1158 ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
1158 ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
1159 i, r.start(i), r.length(i), r.linkrev(i),
1159 i, r.start(i), r.length(i), r.linkrev(i),
1160 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
1160 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
1161 else:
1161 else:
1162 ui.write("% 6d % 7d %s %s %s\n" % (
1162 ui.write("% 6d % 7d %s %s %s\n" % (
1163 i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
1163 i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
1164 shortfn(pp[1])))
1164 shortfn(pp[1])))
1165 elif format == 1:
1165 elif format == 1:
1166 pr = r.parentrevs(i)
1166 pr = r.parentrevs(i)
1167 if ui.verbose:
1167 if ui.verbose:
1168 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
1168 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
1169 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
1169 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
1170 r.linkrev(i), pr[0], pr[1], shortfn(node)))
1170 r.linkrev(i), pr[0], pr[1], shortfn(node)))
1171 else:
1171 else:
1172 ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
1172 ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
1173 i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
1173 i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
1174 shortfn(node)))
1174 shortfn(node)))
1175
1175
1176 @command('debugindexdot', cmdutil.debugrevlogopts,
1176 @command('debugindexdot', cmdutil.debugrevlogopts,
1177 _('-c|-m|FILE'), optionalrepo=True)
1177 _('-c|-m|FILE'), optionalrepo=True)
1178 def debugindexdot(ui, repo, file_=None, **opts):
1178 def debugindexdot(ui, repo, file_=None, **opts):
1179 """dump an index DAG as a graphviz dot file"""
1179 """dump an index DAG as a graphviz dot file"""
1180 opts = pycompat.byteskwargs(opts)
1180 opts = pycompat.byteskwargs(opts)
1181 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
1181 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
1182 ui.write(("digraph G {\n"))
1182 ui.write(("digraph G {\n"))
1183 for i in r:
1183 for i in r:
1184 node = r.node(i)
1184 node = r.node(i)
1185 pp = r.parents(node)
1185 pp = r.parents(node)
1186 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1186 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1187 if pp[1] != nullid:
1187 if pp[1] != nullid:
1188 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1188 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1189 ui.write("}\n")
1189 ui.write("}\n")
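# Illustrative output shape: a "digraph G { ... }" block containing one
# "parent -> child" edge per revision, plus a second edge for each merge
# revision.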
1190
1190
1191 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1191 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1192 def debuginstall(ui, **opts):
1192 def debuginstall(ui, **opts):
1193 '''test Mercurial installation
1193 '''test Mercurial installation
1194
1194
1195 Returns 0 on success.
1195 Returns 0 on success.
1196 '''
1196 '''
1197 opts = pycompat.byteskwargs(opts)
1197 opts = pycompat.byteskwargs(opts)
1198
1198
1199 def writetemp(contents):
1199 def writetemp(contents):
1200 (fd, name) = pycompat.mkstemp(prefix="hg-debuginstall-")
1200 (fd, name) = pycompat.mkstemp(prefix="hg-debuginstall-")
1201 f = os.fdopen(fd, r"wb")
1201 f = os.fdopen(fd, r"wb")
1202 f.write(contents)
1202 f.write(contents)
1203 f.close()
1203 f.close()
1204 return name
1204 return name
1205
1205
1206 problems = 0
1206 problems = 0
1207
1207
1208 fm = ui.formatter('debuginstall', opts)
1208 fm = ui.formatter('debuginstall', opts)
1209 fm.startitem()
1209 fm.startitem()
1210
1210
1211 # encoding
1211 # encoding
1212 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1212 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1213 err = None
1213 err = None
1214 try:
1214 try:
1215 codecs.lookup(pycompat.sysstr(encoding.encoding))
1215 codecs.lookup(pycompat.sysstr(encoding.encoding))
1216 except LookupError as inst:
1216 except LookupError as inst:
1217 err = stringutil.forcebytestr(inst)
1217 err = stringutil.forcebytestr(inst)
1218 problems += 1
1218 problems += 1
1219 fm.condwrite(err, 'encodingerror', _(" %s\n"
1219 fm.condwrite(err, 'encodingerror', _(" %s\n"
1220 " (check that your locale is properly set)\n"), err)
1220 " (check that your locale is properly set)\n"), err)
1221
1221
1222 # Python
1222 # Python
1223 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1223 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1224 pycompat.sysexecutable)
1224 pycompat.sysexecutable)
1225 fm.write('pythonver', _("checking Python version (%s)\n"),
1225 fm.write('pythonver', _("checking Python version (%s)\n"),
1226 ("%d.%d.%d" % sys.version_info[:3]))
1226 ("%d.%d.%d" % sys.version_info[:3]))
1227 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1227 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1228 os.path.dirname(pycompat.fsencode(os.__file__)))
1228 os.path.dirname(pycompat.fsencode(os.__file__)))
1229
1229
1230 security = set(sslutil.supportedprotocols)
1230 security = set(sslutil.supportedprotocols)
1231 if sslutil.hassni:
1231 if sslutil.hassni:
1232 security.add('sni')
1232 security.add('sni')
1233
1233
1234 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1234 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1235 fm.formatlist(sorted(security), name='protocol',
1235 fm.formatlist(sorted(security), name='protocol',
1236 fmt='%s', sep=','))
1236 fmt='%s', sep=','))
1237
1237
1238 # These are warnings, not errors. So don't increment problem count. This
1238 # These are warnings, not errors. So don't increment problem count. This
1239 # may change in the future.
1239 # may change in the future.
1240 if 'tls1.2' not in security:
1240 if 'tls1.2' not in security:
1241 fm.plain(_(' TLS 1.2 not supported by Python install; '
1241 fm.plain(_(' TLS 1.2 not supported by Python install; '
1242 'network connections lack modern security\n'))
1242 'network connections lack modern security\n'))
1243 if 'sni' not in security:
1243 if 'sni' not in security:
1244 fm.plain(_(' SNI not supported by Python install; may have '
1244 fm.plain(_(' SNI not supported by Python install; may have '
1245 'connectivity issues with some servers\n'))
1245 'connectivity issues with some servers\n'))
1246
1246
1247 # TODO print CA cert info
1247 # TODO print CA cert info
1248
1248
1249 # hg version
1249 # hg version
1250 hgver = util.version()
1250 hgver = util.version()
1251 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1251 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1252 hgver.split('+')[0])
1252 hgver.split('+')[0])
1253 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1253 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1254 '+'.join(hgver.split('+')[1:]))
1254 '+'.join(hgver.split('+')[1:]))
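# For example (hypothetical version string): a custom build reporting
# "4.7.1+2-abc123" shows "4.7.1" as the Mercurial version and "2-abc123" as
# the custom-build suffix.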
1255
1255
1256 # compiled modules
1256 # compiled modules
1257 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1257 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1258 policy.policy)
1258 policy.policy)
1259 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1259 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1260 os.path.dirname(pycompat.fsencode(__file__)))
1260 os.path.dirname(pycompat.fsencode(__file__)))
1261
1261
1262 if policy.policy in ('c', 'allow'):
1262 if policy.policy in ('c', 'allow'):
1263 err = None
1263 err = None
1264 try:
1264 try:
1265 from .cext import (
1265 from .cext import (
1266 base85,
1266 base85,
1267 bdiff,
1267 bdiff,
1268 mpatch,
1268 mpatch,
1269 osutil,
1269 osutil,
1270 )
1270 )
1271 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1271 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1272 except Exception as inst:
1272 except Exception as inst:
1273 err = stringutil.forcebytestr(inst)
1273 err = stringutil.forcebytestr(inst)
1274 problems += 1
1274 problems += 1
1275 fm.condwrite(err, 'extensionserror', " %s\n", err)
1275 fm.condwrite(err, 'extensionserror', " %s\n", err)
1276
1276
1277 compengines = util.compengines._engines.values()
1277 compengines = util.compengines._engines.values()
1278 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1278 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1279 fm.formatlist(sorted(e.name() for e in compengines),
1279 fm.formatlist(sorted(e.name() for e in compengines),
1280 name='compengine', fmt='%s', sep=', '))
1280 name='compengine', fmt='%s', sep=', '))
1281 fm.write('compenginesavail', _('checking available compression engines '
1281 fm.write('compenginesavail', _('checking available compression engines '
1282 '(%s)\n'),
1282 '(%s)\n'),
1283 fm.formatlist(sorted(e.name() for e in compengines
1283 fm.formatlist(sorted(e.name() for e in compengines
1284 if e.available()),
1284 if e.available()),
1285 name='compengine', fmt='%s', sep=', '))
1285 name='compengine', fmt='%s', sep=', '))
1286 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1286 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1287 fm.write('compenginesserver', _('checking available compression engines '
1287 fm.write('compenginesserver', _('checking available compression engines '
1288 'for wire protocol (%s)\n'),
1288 'for wire protocol (%s)\n'),
1289 fm.formatlist([e.name() for e in wirecompengines
1289 fm.formatlist([e.name() for e in wirecompengines
1290 if e.wireprotosupport()],
1290 if e.wireprotosupport()],
1291 name='compengine', fmt='%s', sep=', '))
1291 name='compengine', fmt='%s', sep=', '))
1292 re2 = 'missing'
1292 re2 = 'missing'
1293 if util._re2:
1293 if util._re2:
1294 re2 = 'available'
1294 re2 = 'available'
1295 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1295 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1296 fm.data(re2=bool(util._re2))
1296 fm.data(re2=bool(util._re2))
1297
1297
1298 # templates
1298 # templates
1299 p = templater.templatepaths()
1299 p = templater.templatepaths()
1300 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1300 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1301 fm.condwrite(not p, '', _(" no template directories found\n"))
1301 fm.condwrite(not p, '', _(" no template directories found\n"))
1302 if p:
1302 if p:
1303 m = templater.templatepath("map-cmdline.default")
1303 m = templater.templatepath("map-cmdline.default")
1304 if m:
1304 if m:
1305 # template found, check if it is working
1305 # template found, check if it is working
1306 err = None
1306 err = None
1307 try:
1307 try:
1308 templater.templater.frommapfile(m)
1308 templater.templater.frommapfile(m)
1309 except Exception as inst:
1309 except Exception as inst:
1310 err = stringutil.forcebytestr(inst)
1310 err = stringutil.forcebytestr(inst)
1311 p = None
1311 p = None
1312 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1312 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1313 else:
1313 else:
1314 p = None
1314 p = None
1315 fm.condwrite(p, 'defaulttemplate',
1315 fm.condwrite(p, 'defaulttemplate',
1316 _("checking default template (%s)\n"), m)
1316 _("checking default template (%s)\n"), m)
1317 fm.condwrite(not m, 'defaulttemplatenotfound',
1317 fm.condwrite(not m, 'defaulttemplatenotfound',
1318 _(" template '%s' not found\n"), "default")
1318 _(" template '%s' not found\n"), "default")
1319 if not p:
1319 if not p:
1320 problems += 1
1320 problems += 1
1321 fm.condwrite(not p, '',
1321 fm.condwrite(not p, '',
1322 _(" (templates seem to have been installed incorrectly)\n"))
1322 _(" (templates seem to have been installed incorrectly)\n"))
1323
1323
1324 # editor
1324 # editor
1325 editor = ui.geteditor()
1325 editor = ui.geteditor()
1326 editor = util.expandpath(editor)
1326 editor = util.expandpath(editor)
1327 editorbin = procutil.shellsplit(editor)[0]
1327 editorbin = procutil.shellsplit(editor)[0]
1328 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1328 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1329 cmdpath = procutil.findexe(editorbin)
1329 cmdpath = procutil.findexe(editorbin)
1330 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1330 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1331 _(" No commit editor set and can't find %s in PATH\n"
1331 _(" No commit editor set and can't find %s in PATH\n"
1332 " (specify a commit editor in your configuration"
1332 " (specify a commit editor in your configuration"
1333 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1333 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1334 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1334 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1335 _(" Can't find editor '%s' in PATH\n"
1335 _(" Can't find editor '%s' in PATH\n"
1336 " (specify a commit editor in your configuration"
1336 " (specify a commit editor in your configuration"
1337 " file)\n"), not cmdpath and editorbin)
1337 " file)\n"), not cmdpath and editorbin)
1338 if not cmdpath and editor != 'vi':
1338 if not cmdpath and editor != 'vi':
1339 problems += 1
1339 problems += 1
1340
1340
1341 # check username
1341 # check username
1342 username = None
1342 username = None
1343 err = None
1343 err = None
1344 try:
1344 try:
1345 username = ui.username()
1345 username = ui.username()
1346 except error.Abort as e:
1346 except error.Abort as e:
1347 err = stringutil.forcebytestr(e)
1347 err = stringutil.forcebytestr(e)
1348 problems += 1
1348 problems += 1
1349
1349
1350 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1350 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1351 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1351 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1352 " (specify a username in your configuration file)\n"), err)
1352 " (specify a username in your configuration file)\n"), err)
1353
1353
1354 fm.condwrite(not problems, '',
1354 fm.condwrite(not problems, '',
1355 _("no problems detected\n"))
1355 _("no problems detected\n"))
1356 if not problems:
1356 if not problems:
1357 fm.data(problems=problems)
1357 fm.data(problems=problems)
1358 fm.condwrite(problems, 'problems',
1358 fm.condwrite(problems, 'problems',
1359 _("%d problems detected,"
1359 _("%d problems detected,"
1360 " please check your install!\n"), problems)
1360 " please check your install!\n"), problems)
1361 fm.end()
1361 fm.end()
1362
1362
1363 return problems
1363 return problems
1364
1364
1365 @command('debugknown', [], _('REPO ID...'), norepo=True)
1365 @command('debugknown', [], _('REPO ID...'), norepo=True)
1366 def debugknown(ui, repopath, *ids, **opts):
1366 def debugknown(ui, repopath, *ids, **opts):
1367 """test whether node ids are known to a repo
1367 """test whether node ids are known to a repo
1368
1368
1369 Every ID must be a full-length hex node id string. Returns a list of 0s
1369 Every ID must be a full-length hex node id string. Returns a list of 0s
1370 and 1s indicating unknown/known.
1370 and 1s indicating unknown/known.
1371 """
1371 """
1372 opts = pycompat.byteskwargs(opts)
1372 opts = pycompat.byteskwargs(opts)
1373 repo = hg.peer(ui, opts, repopath)
1373 repo = hg.peer(ui, opts, repopath)
1374 if not repo.capable('known'):
1374 if not repo.capable('known'):
1375 raise error.Abort("known() not supported by target repository")
1375 raise error.Abort("known() not supported by target repository")
1376 flags = repo.known([bin(s) for s in ids])
1376 flags = repo.known([bin(s) for s in ids])
1377 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1377 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
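# Illustrative output: querying two full hex ids where only the first one is
# known to the target repo prints "10".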
1378
1378
1379 @command('debuglabelcomplete', [], _('LABEL...'))
1379 @command('debuglabelcomplete', [], _('LABEL...'))
1380 def debuglabelcomplete(ui, repo, *args):
1380 def debuglabelcomplete(ui, repo, *args):
1381 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1381 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1382 debugnamecomplete(ui, repo, *args)
1382 debugnamecomplete(ui, repo, *args)
1383
1383
1384 @command('debuglocks',
1384 @command('debuglocks',
1385 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1385 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1386 ('W', 'force-wlock', None,
1386 ('W', 'force-wlock', None,
1387 _('free the working state lock (DANGEROUS)')),
1387 _('free the working state lock (DANGEROUS)')),
1388 ('s', 'set-lock', None, _('set the store lock until stopped')),
1388 ('s', 'set-lock', None, _('set the store lock until stopped')),
1389 ('S', 'set-wlock', None,
1389 ('S', 'set-wlock', None,
1390 _('set the working state lock until stopped'))],
1390 _('set the working state lock until stopped'))],
1391 _('[OPTION]...'))
1391 _('[OPTION]...'))
1392 def debuglocks(ui, repo, **opts):
1392 def debuglocks(ui, repo, **opts):
1393 """show or modify state of locks
1393 """show or modify state of locks
1394
1394
1395 By default, this command will show which locks are held. This
1395 By default, this command will show which locks are held. This
1396 includes the user and process holding the lock, the amount of time
1396 includes the user and process holding the lock, the amount of time
1397 the lock has been held, and the machine name where the process is
1397 the lock has been held, and the machine name where the process is
1398 running if it's not local.
1398 running if it's not local.
1399
1399
1400 Locks protect the integrity of Mercurial's data, so they should be
1400 Locks protect the integrity of Mercurial's data, so they should be
1401 treated with care. System crashes or other interruptions may cause
1401 treated with care. System crashes or other interruptions may cause
1402 locks to not be properly released, though Mercurial will usually
1402 locks to not be properly released, though Mercurial will usually
1403 detect and remove such stale locks automatically.
1403 detect and remove such stale locks automatically.
1404
1404
1405 However, detecting stale locks may not always be possible (for
1405 However, detecting stale locks may not always be possible (for
1406 instance, on a shared filesystem). Removing locks may also be
1406 instance, on a shared filesystem). Removing locks may also be
1407 blocked by filesystem permissions.
1407 blocked by filesystem permissions.
1408
1408
1409 Setting a lock will prevent other commands from changing the data.
1409 Setting a lock will prevent other commands from changing the data.
1410 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1410 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1411 The set locks are removed when the command exits.
1411 The set locks are removed when the command exits.
1412
1412
1413 Returns 0 if no locks are held.
1413 Returns 0 if no locks are held.
1414
1414
1415 """
1415 """
1416
1416
1417 if opts.get(r'force_lock'):
1417 if opts.get(r'force_lock'):
1418 repo.svfs.unlink('lock')
1418 repo.svfs.unlink('lock')
1419 if opts.get(r'force_wlock'):
1419 if opts.get(r'force_wlock'):
1420 repo.vfs.unlink('wlock')
1420 repo.vfs.unlink('wlock')
1421 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1421 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1422 return 0
1422 return 0
1423
1423
1424 locks = []
1424 locks = []
1425 try:
1425 try:
1426 if opts.get(r'set_wlock'):
1426 if opts.get(r'set_wlock'):
1427 try:
1427 try:
1428 locks.append(repo.wlock(False))
1428 locks.append(repo.wlock(False))
1429 except error.LockHeld:
1429 except error.LockHeld:
1430 raise error.Abort(_('wlock is already held'))
1430 raise error.Abort(_('wlock is already held'))
1431 if opts.get(r'set_lock'):
1431 if opts.get(r'set_lock'):
1432 try:
1432 try:
1433 locks.append(repo.lock(False))
1433 locks.append(repo.lock(False))
1434 except error.LockHeld:
1434 except error.LockHeld:
1435 raise error.Abort(_('lock is already held'))
1435 raise error.Abort(_('lock is already held'))
1436 if len(locks):
1436 if len(locks):
1437 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1437 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1438 return 0
1438 return 0
1439 finally:
1439 finally:
1440 release(*locks)
1440 release(*locks)
1441
1441
1442 now = time.time()
1442 now = time.time()
1443 held = 0
1443 held = 0
1444
1444
1445 def report(vfs, name, method):
1445 def report(vfs, name, method):
1446 # this causes stale locks to get reaped for more accurate reporting
1446 # this causes stale locks to get reaped for more accurate reporting
1447 try:
1447 try:
1448 l = method(False)
1448 l = method(False)
1449 except error.LockHeld:
1449 except error.LockHeld:
1450 l = None
1450 l = None
1451
1451
1452 if l:
1452 if l:
1453 l.release()
1453 l.release()
1454 else:
1454 else:
1455 try:
1455 try:
1456 st = vfs.lstat(name)
1456 st = vfs.lstat(name)
1457 age = now - st[stat.ST_MTIME]
1457 age = now - st[stat.ST_MTIME]
1458 user = util.username(st.st_uid)
1458 user = util.username(st.st_uid)
1459 locker = vfs.readlock(name)
1459 locker = vfs.readlock(name)
1460 if ":" in locker:
1460 if ":" in locker:
1461 host, pid = locker.split(':')
1461 host, pid = locker.split(':')
1462 if host == socket.gethostname():
1462 if host == socket.gethostname():
1463 locker = 'user %s, process %s' % (user, pid)
1463 locker = 'user %s, process %s' % (user, pid)
1464 else:
1464 else:
1465 locker = 'user %s, process %s, host %s' \
1465 locker = 'user %s, process %s, host %s' \
1466 % (user, pid, host)
1466 % (user, pid, host)
1467 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1467 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1468 return 1
1468 return 1
1469 except OSError as e:
1469 except OSError as e:
1470 if e.errno != errno.ENOENT:
1470 if e.errno != errno.ENOENT:
1471 raise
1471 raise
1472
1472
1473 ui.write(("%-6s free\n") % (name + ":"))
1473 ui.write(("%-6s free\n") % (name + ":"))
1474 return 0
1474 return 0
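# Illustrative report lines (hypothetical user, pid, host and age values):
#
#   lock:  user alice, process 12345, host build01 (3s)
#   wlock: free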
1475
1475
1476 held += report(repo.svfs, "lock", repo.lock)
1476 held += report(repo.svfs, "lock", repo.lock)
1477 held += report(repo.vfs, "wlock", repo.wlock)
1477 held += report(repo.vfs, "wlock", repo.wlock)
1478
1478
1479 return held
1479 return held
1480
1480
1481 @command('debugmanifestfulltextcache', [
1481 @command('debugmanifestfulltextcache', [
1482 ('', 'clear', False, _('clear the cache')),
1482 ('', 'clear', False, _('clear the cache')),
1483 ('a', 'add', '', _('add the given manifest node to the cache'),
1483 ('a', 'add', '', _('add the given manifest node to the cache'),
1484 _('NODE'))
1484 _('NODE'))
1485 ], '')
1485 ], '')
1486 def debugmanifestfulltextcache(ui, repo, add=None, **opts):
1486 def debugmanifestfulltextcache(ui, repo, add=None, **opts):
1487 """show, clear or amend the contents of the manifest fulltext cache"""
1487 """show, clear or amend the contents of the manifest fulltext cache"""
1488 with repo.lock():
1488 with repo.lock():
1489 r = repo.manifestlog._revlog
1489 r = repo.manifestlog._revlog
1490 try:
1490 try:
1491 cache = r._fulltextcache
1491 cache = r._fulltextcache
1492 except AttributeError:
1492 except AttributeError:
1493 ui.warn(_(
1493 ui.warn(_(
1494 "Current revlog implementation doesn't appear to have a "
1494 "Current revlog implementation doesn't appear to have a "
1495 'manifest fulltext cache\n'))
1495 'manifest fulltext cache\n'))
1496 return
1496 return
1497
1497
1498 if opts.get(r'clear'):
1498 if opts.get(r'clear'):
1499 cache.clear()
1499 cache.clear()
1500
1500
1501 if add:
1501 if add:
1502 try:
1502 try:
1503 manifest = repo.manifestlog[r.lookup(add)]
1503 manifest = repo.manifestlog[r.lookup(add)]
1504 except error.LookupError as e:
1504 except error.LookupError as e:
1505 raise error.Abort(e, hint="Check your manifest node id")
1505 raise error.Abort(e, hint="Check your manifest node id")
1506 manifest.read() # stores revision in cache too
1506 manifest.read() # stores revision in cache too
1507
1507
1508 if not len(cache):
1508 if not len(cache):
1509 ui.write(_('Cache empty\n'))
1509 ui.write(_('Cache empty\n'))
1510 else:
1510 else:
1511 ui.write(
1511 ui.write(
1512 _('Cache contains %d manifest entries, in order of most to '
1512 _('Cache contains %d manifest entries, in order of most to '
1513 'least recent:\n') % (len(cache),))
1513 'least recent:\n') % (len(cache),))
1514 totalsize = 0
1514 totalsize = 0
1515 for nodeid in cache:
1515 for nodeid in cache:
1516 # Use cache.get to not update the LRU order
1516 # Use cache.get to not update the LRU order
1517 data = cache.get(nodeid)
1517 data = cache.get(nodeid)
1518 size = len(data)
1518 size = len(data)
1519 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1519 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1520 ui.write(_('id: %s, size %s\n') % (
1520 ui.write(_('id: %s, size %s\n') % (
1521 hex(nodeid), util.bytecount(size)))
1521 hex(nodeid), util.bytecount(size)))
1522 ondisk = cache._opener.stat('manifestfulltextcache').st_size
1522 ondisk = cache._opener.stat('manifestfulltextcache').st_size
1523 ui.write(
1523 ui.write(
1524 _('Total cache data size %s, on-disk %s\n') % (
1524 _('Total cache data size %s, on-disk %s\n') % (
1525 util.bytecount(totalsize), util.bytecount(ondisk))
1525 util.bytecount(totalsize), util.bytecount(ondisk))
1526 )
1526 )
1527
1527
1528 @command('debugmergestate', [], '')
1528 @command('debugmergestate', [], '')
1529 def debugmergestate(ui, repo, *args):
1529 def debugmergestate(ui, repo, *args):
1530 """print merge state
1530 """print merge state
1531
1531
1532 Use --verbose to print out information about whether v1 or v2 merge state
1532 Use --verbose to print out information about whether v1 or v2 merge state
1533 was chosen."""
1533 was chosen."""
1534 def _hashornull(h):
1534 def _hashornull(h):
1535 if h == nullhex:
1535 if h == nullhex:
1536 return 'null'
1536 return 'null'
1537 else:
1537 else:
1538 return h
1538 return h
1539
1539
1540 def printrecords(version):
1540 def printrecords(version):
1541 ui.write(('* version %d records\n') % version)
1541 ui.write(('* version %d records\n') % version)
1542 if version == 1:
1542 if version == 1:
1543 records = v1records
1543 records = v1records
1544 else:
1544 else:
1545 records = v2records
1545 records = v2records
1546
1546
1547 for rtype, record in records:
1547 for rtype, record in records:
1548 # pretty print some record types
1548 # pretty print some record types
1549 if rtype == 'L':
1549 if rtype == 'L':
1550 ui.write(('local: %s\n') % record)
1550 ui.write(('local: %s\n') % record)
1551 elif rtype == 'O':
1551 elif rtype == 'O':
1552 ui.write(('other: %s\n') % record)
1552 ui.write(('other: %s\n') % record)
1553 elif rtype == 'm':
1553 elif rtype == 'm':
1554 driver, mdstate = record.split('\0', 1)
1554 driver, mdstate = record.split('\0', 1)
1555 ui.write(('merge driver: %s (state "%s")\n')
1555 ui.write(('merge driver: %s (state "%s")\n')
1556 % (driver, mdstate))
1556 % (driver, mdstate))
1557 elif rtype in 'FDC':
1557 elif rtype in 'FDC':
1558 r = record.split('\0')
1558 r = record.split('\0')
1559 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1559 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1560 if version == 1:
1560 if version == 1:
1561 onode = 'not stored in v1 format'
1561 onode = 'not stored in v1 format'
1562 flags = r[7]
1562 flags = r[7]
1563 else:
1563 else:
1564 onode, flags = r[7:9]
1564 onode, flags = r[7:9]
1565 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1565 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1566 % (f, rtype, state, _hashornull(hash)))
1566 % (f, rtype, state, _hashornull(hash)))
1567 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1567 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1568 ui.write((' ancestor path: %s (node %s)\n')
1568 ui.write((' ancestor path: %s (node %s)\n')
1569 % (afile, _hashornull(anode)))
1569 % (afile, _hashornull(anode)))
1570 ui.write((' other path: %s (node %s)\n')
1570 ui.write((' other path: %s (node %s)\n')
1571 % (ofile, _hashornull(onode)))
1571 % (ofile, _hashornull(onode)))
1572 elif rtype == 'f':
1572 elif rtype == 'f':
1573 filename, rawextras = record.split('\0', 1)
1573 filename, rawextras = record.split('\0', 1)
1574 extras = rawextras.split('\0')
1574 extras = rawextras.split('\0')
1575 i = 0
1575 i = 0
1576 extrastrings = []
1576 extrastrings = []
1577 while i < len(extras):
1577 while i < len(extras):
1578 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1578 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1579 i += 2
1579 i += 2
1580
1580
1581 ui.write(('file extras: %s (%s)\n')
1581 ui.write(('file extras: %s (%s)\n')
1582 % (filename, ', '.join(extrastrings)))
1582 % (filename, ', '.join(extrastrings)))
1583 elif rtype == 'l':
1583 elif rtype == 'l':
1584 labels = record.split('\0', 2)
1584 labels = record.split('\0', 2)
1585 labels = [l for l in labels if len(l) > 0]
1585 labels = [l for l in labels if len(l) > 0]
1586 ui.write(('labels:\n'))
1586 ui.write(('labels:\n'))
1587 ui.write((' local: %s\n' % labels[0]))
1587 ui.write((' local: %s\n' % labels[0]))
1588 ui.write((' other: %s\n' % labels[1]))
1588 ui.write((' other: %s\n' % labels[1]))
1589 if len(labels) > 2:
1589 if len(labels) > 2:
1590 ui.write((' base: %s\n' % labels[2]))
1590 ui.write((' base: %s\n' % labels[2]))
1591 else:
1591 else:
1592 ui.write(('unrecognized entry: %s\t%s\n')
1592 ui.write(('unrecognized entry: %s\t%s\n')
1593 % (rtype, record.replace('\0', '\t')))
1593 % (rtype, record.replace('\0', '\t')))
1594
1594
1595 # Avoid mergestate.read() since it may raise an exception for unsupported
1595 # Avoid mergestate.read() since it may raise an exception for unsupported
1596 # merge state records. We shouldn't be doing this, but this is OK since this
1596 # merge state records. We shouldn't be doing this, but this is OK since this
1597 # command is pretty low-level.
1597 # command is pretty low-level.
1598 ms = mergemod.mergestate(repo)
1598 ms = mergemod.mergestate(repo)
1599
1599
1600 # sort so that reasonable information is on top
1600 # sort so that reasonable information is on top
1601 v1records = ms._readrecordsv1()
1601 v1records = ms._readrecordsv1()
1602 v2records = ms._readrecordsv2()
1602 v2records = ms._readrecordsv2()
1603 order = 'LOml'
1603 order = 'LOml'
1604 def key(r):
1604 def key(r):
1605 idx = order.find(r[0])
1605 idx = order.find(r[0])
1606 if idx == -1:
1606 if idx == -1:
1607 return (1, r[1])
1607 return (1, r[1])
1608 else:
1608 else:
1609 return (0, idx)
1609 return (0, idx)
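# Illustrative ordering (hypothetical records): given records of types 'L',
# 'O', 'm' and 'l' plus, say, an 'f' record, the known types sort first in
# the fixed order L, O, m, l, and everything else falls to the end, ordered
# by its raw record payload.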
1610 v1records.sort(key=key)
1610 v1records.sort(key=key)
1611 v2records.sort(key=key)
1611 v2records.sort(key=key)
1612
1612
1613 if not v1records and not v2records:
1613 if not v1records and not v2records:
1614 ui.write(('no merge state found\n'))
1614 ui.write(('no merge state found\n'))
1615 elif not v2records:
1615 elif not v2records:
1616 ui.note(('no version 2 merge state\n'))
1616 ui.note(('no version 2 merge state\n'))
1617 printrecords(1)
1617 printrecords(1)
1618 elif ms._v1v2match(v1records, v2records):
1618 elif ms._v1v2match(v1records, v2records):
1619 ui.note(('v1 and v2 states match: using v2\n'))
1619 ui.note(('v1 and v2 states match: using v2\n'))
1620 printrecords(2)
1620 printrecords(2)
1621 else:
1621 else:
1622 ui.note(('v1 and v2 states mismatch: using v1\n'))
1622 ui.note(('v1 and v2 states mismatch: using v1\n'))
1623 printrecords(1)
1623 printrecords(1)
1624 if ui.verbose:
1624 if ui.verbose:
1625 printrecords(2)
1625 printrecords(2)
1626
1626
1627 @command('debugnamecomplete', [], _('NAME...'))
1627 @command('debugnamecomplete', [], _('NAME...'))
1628 def debugnamecomplete(ui, repo, *args):
1628 def debugnamecomplete(ui, repo, *args):
1629 '''complete "names" - tags, open branch names, bookmark names'''
1629 '''complete "names" - tags, open branch names, bookmark names'''
1630
1630
1631 names = set()
1631 names = set()
1632 # since we previously only listed open branches, we will handle that
1632 # since we previously only listed open branches, we will handle that
1633 # specially (after this for loop)
1633 # specially (after this for loop)
1634 for name, ns in repo.names.iteritems():
1634 for name, ns in repo.names.iteritems():
1635 if name != 'branches':
1635 if name != 'branches':
1636 names.update(ns.listnames(repo))
1636 names.update(ns.listnames(repo))
1637 names.update(tag for (tag, heads, tip, closed)
1637 names.update(tag for (tag, heads, tip, closed)
1638 in repo.branchmap().iterbranches() if not closed)
1638 in repo.branchmap().iterbranches() if not closed)
1639 completions = set()
1639 completions = set()
1640 if not args:
1640 if not args:
1641 args = ['']
1641 args = ['']
1642 for a in args:
1642 for a in args:
1643 completions.update(n for n in names if n.startswith(a))
1643 completions.update(n for n in names if n.startswith(a))
1644 ui.write('\n'.join(sorted(completions)))
1644 ui.write('\n'.join(sorted(completions)))
1645 ui.write('\n')
1645 ui.write('\n')
1646
1646
1647 @command('debugobsolete',
1647 @command('debugobsolete',
1648 [('', 'flags', 0, _('markers flag')),
1648 [('', 'flags', 0, _('markers flag')),
1649 ('', 'record-parents', False,
1649 ('', 'record-parents', False,
1650 _('record parent information for the precursor')),
1650 _('record parent information for the precursor')),
1651 ('r', 'rev', [], _('display markers relevant to REV')),
1651 ('r', 'rev', [], _('display markers relevant to REV')),
1652 ('', 'exclusive', False, _('restrict display to markers only '
1652 ('', 'exclusive', False, _('restrict display to markers only '
1653 'relevant to REV')),
1653 'relevant to REV')),
1654 ('', 'index', False, _('display index of the marker')),
1654 ('', 'index', False, _('display index of the marker')),
1655 ('', 'delete', [], _('delete markers specified by indices')),
1655 ('', 'delete', [], _('delete markers specified by indices')),
1656 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1656 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1657 _('[OBSOLETED [REPLACEMENT ...]]'))
1657 _('[OBSOLETED [REPLACEMENT ...]]'))
1658 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1658 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1659 """create arbitrary obsolete marker
1659 """create arbitrary obsolete marker
1660
1660
1661 With no arguments, displays the list of obsolescence markers."""
1661 With no arguments, displays the list of obsolescence markers."""
1662
1662
1663 opts = pycompat.byteskwargs(opts)
1663 opts = pycompat.byteskwargs(opts)
1664
1664
1665 def parsenodeid(s):
1665 def parsenodeid(s):
1666 try:
1666 try:
1667 # We do not use revsingle/revrange functions here to accept
1667 # We do not use revsingle/revrange functions here to accept
1668 # arbitrary node identifiers, possibly not present in the
1668 # arbitrary node identifiers, possibly not present in the
1669 # local repository.
1669 # local repository.
1670 n = bin(s)
1670 n = bin(s)
1671 if len(n) != len(nullid):
1671 if len(n) != len(nullid):
1672 raise TypeError()
1672 raise TypeError()
1673 return n
1673 return n
1674 except TypeError:
1674 except TypeError:
1675 raise error.Abort('changeset references must be full hexadecimal '
1675 raise error.Abort('changeset references must be full hexadecimal '
1676 'node identifiers')
1676 'node identifiers')
1677
1677
1678 if opts.get('delete'):
1678 if opts.get('delete'):
1679 indices = []
1679 indices = []
1680 for v in opts.get('delete'):
1680 for v in opts.get('delete'):
1681 try:
1681 try:
1682 indices.append(int(v))
1682 indices.append(int(v))
1683 except ValueError:
1683 except ValueError:
1684 raise error.Abort(_('invalid index value: %r') % v,
1684 raise error.Abort(_('invalid index value: %r') % v,
1685 hint=_('use integers for indices'))
1685 hint=_('use integers for indices'))
1686
1686
1687 if repo.currenttransaction():
1687 if repo.currenttransaction():
1688 raise error.Abort(_('cannot delete obsmarkers in the middle '
1688 raise error.Abort(_('cannot delete obsmarkers in the middle '
1689 'of a transaction.'))
1689 'of a transaction.'))
1690
1690
1691 with repo.lock():
1691 with repo.lock():
1692 n = repair.deleteobsmarkers(repo.obsstore, indices)
1692 n = repair.deleteobsmarkers(repo.obsstore, indices)
1693 ui.write(_('deleted %i obsolescence markers\n') % n)
1693 ui.write(_('deleted %i obsolescence markers\n') % n)
1694
1694
1695 return
1695 return
1696
1696
1697 if precursor is not None:
1697 if precursor is not None:
1698 if opts['rev']:
1698 if opts['rev']:
1699 raise error.Abort('cannot select revision when creating marker')
1699 raise error.Abort('cannot select revision when creating marker')
1700 metadata = {}
1700 metadata = {}
1701 metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
1701 metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
1702 succs = tuple(parsenodeid(succ) for succ in successors)
1702 succs = tuple(parsenodeid(succ) for succ in successors)
1703 l = repo.lock()
1703 l = repo.lock()
1704 try:
1704 try:
1705 tr = repo.transaction('debugobsolete')
1705 tr = repo.transaction('debugobsolete')
1706 try:
1706 try:
1707 date = opts.get('date')
1707 date = opts.get('date')
1708 if date:
1708 if date:
1709 date = dateutil.parsedate(date)
1709 date = dateutil.parsedate(date)
1710 else:
1710 else:
1711 date = None
1711 date = None
1712 prec = parsenodeid(precursor)
1712 prec = parsenodeid(precursor)
1713 parents = None
1713 parents = None
1714 if opts['record_parents']:
1714 if opts['record_parents']:
1715 if prec not in repo.unfiltered():
1715 if prec not in repo.unfiltered():
1716 raise error.Abort('cannot use --record-parents on '
1716 raise error.Abort('cannot use --record-parents on '
1717 'unknown changesets')
1717 'unknown changesets')
1718 parents = repo.unfiltered()[prec].parents()
1718 parents = repo.unfiltered()[prec].parents()
1719 parents = tuple(p.node() for p in parents)
1719 parents = tuple(p.node() for p in parents)
1720 repo.obsstore.create(tr, prec, succs, opts['flags'],
1720 repo.obsstore.create(tr, prec, succs, opts['flags'],
1721 parents=parents, date=date,
1721 parents=parents, date=date,
1722 metadata=metadata, ui=ui)
1722 metadata=metadata, ui=ui)
1723 tr.close()
1723 tr.close()
1724 except ValueError as exc:
1724 except ValueError as exc:
1725 raise error.Abort(_('bad obsmarker input: %s') %
1725 raise error.Abort(_('bad obsmarker input: %s') %
1726 pycompat.bytestr(exc))
1726 pycompat.bytestr(exc))
1727 finally:
1727 finally:
1728 tr.release()
1728 tr.release()
1729 finally:
1729 finally:
1730 l.release()
1730 l.release()
1731 else:
1731 else:
1732 if opts['rev']:
1732 if opts['rev']:
1733 revs = scmutil.revrange(repo, opts['rev'])
1733 revs = scmutil.revrange(repo, opts['rev'])
1734 nodes = [repo[r].node() for r in revs]
1734 nodes = [repo[r].node() for r in revs]
1735 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1735 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1736 exclusive=opts['exclusive']))
1736 exclusive=opts['exclusive']))
1737 markers.sort(key=lambda x: x._data)
1737 markers.sort(key=lambda x: x._data)
1738 else:
1738 else:
1739 markers = obsutil.getmarkers(repo)
1739 markers = obsutil.getmarkers(repo)
1740
1740
1741 markerstoiter = markers
1741 markerstoiter = markers
1742 isrelevant = lambda m: True
1742 isrelevant = lambda m: True
1743 if opts.get('rev') and opts.get('index'):
1743 if opts.get('rev') and opts.get('index'):
1744 markerstoiter = obsutil.getmarkers(repo)
1744 markerstoiter = obsutil.getmarkers(repo)
1745 markerset = set(markers)
1745 markerset = set(markers)
1746 isrelevant = lambda m: m in markerset
1746 isrelevant = lambda m: m in markerset
1747
1747
1748 fm = ui.formatter('debugobsolete', opts)
1748 fm = ui.formatter('debugobsolete', opts)
1749 for i, m in enumerate(markerstoiter):
1749 for i, m in enumerate(markerstoiter):
1750 if not isrelevant(m):
1750 if not isrelevant(m):
1751 # a marker can be irrelevant when we're iterating over a set
1751 # a marker can be irrelevant when we're iterating over a set
1752 # of markers (markerstoiter) which is bigger than the set
1752 # of markers (markerstoiter) which is bigger than the set
1753 # of markers we want to display (markers).
1753 # of markers we want to display (markers).
1754 # This can happen if both --index and --rev options are
1754 # This can happen if both --index and --rev options are
1755 # provided: we then need to iterate over all of the markers
1755 # provided: we then need to iterate over all of the markers
1756 # to get the correct indices, but only display the ones that
1756 # to get the correct indices, but only display the ones that
1757 # are relevant to the --rev value
1757 # are relevant to the --rev value
1758 continue
1758 continue
1759 fm.startitem()
1759 fm.startitem()
1760 ind = i if opts.get('index') else None
1760 ind = i if opts.get('index') else None
1761 cmdutil.showmarker(fm, m, index=ind)
1761 cmdutil.showmarker(fm, m, index=ind)
1762 fm.end()
1762 fm.end()
1763
1763
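# Editor's note -- a minimal, illustrative sketch (not part of the patch) of
# the indexing pattern used in the loop above: iterate the full marker list so
# that indices stay stable, but print only the markers selected by --rev.
# All names below are made up for the example.
def _filtered_enumerate_sketch():
    allmarkers = ['m0', 'm1', 'm2', 'm3']   # stands in for obsutil.getmarkers(repo)
    selected = {'m1', 'm3'}                 # stands in for the --rev selection
    for i, m in enumerate(allmarkers):
        if m not in selected:
            continue                        # the index i still advances for skipped items
        print('%d %s' % (i, m))             # prints "1 m1" then "3 m3"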
1764 @command('debugpathcomplete',
1764 @command('debugpathcomplete',
1765 [('f', 'full', None, _('complete an entire path')),
1765 [('f', 'full', None, _('complete an entire path')),
1766 ('n', 'normal', None, _('show only normal files')),
1766 ('n', 'normal', None, _('show only normal files')),
1767 ('a', 'added', None, _('show only added files')),
1767 ('a', 'added', None, _('show only added files')),
1768 ('r', 'removed', None, _('show only removed files'))],
1768 ('r', 'removed', None, _('show only removed files'))],
1769 _('FILESPEC...'))
1769 _('FILESPEC...'))
1770 def debugpathcomplete(ui, repo, *specs, **opts):
1770 def debugpathcomplete(ui, repo, *specs, **opts):
1771 '''complete part or all of a tracked path
1771 '''complete part or all of a tracked path
1772
1772
1773 This command supports shells that offer path name completion. It
1773 This command supports shells that offer path name completion. It
1774 currently completes only files already known to the dirstate.
1774 currently completes only files already known to the dirstate.
1775
1775
1776 Completion extends only to the next path segment unless
1776 Completion extends only to the next path segment unless
1777 --full is specified, in which case entire paths are used.'''
1777 --full is specified, in which case entire paths are used.'''
1778
1778
1779 def complete(path, acceptable):
1779 def complete(path, acceptable):
1780 dirstate = repo.dirstate
1780 dirstate = repo.dirstate
1781 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1781 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1782 rootdir = repo.root + pycompat.ossep
1782 rootdir = repo.root + pycompat.ossep
1783 if spec != repo.root and not spec.startswith(rootdir):
1783 if spec != repo.root and not spec.startswith(rootdir):
1784 return [], []
1784 return [], []
1785 if os.path.isdir(spec):
1785 if os.path.isdir(spec):
1786 spec += '/'
1786 spec += '/'
1787 spec = spec[len(rootdir):]
1787 spec = spec[len(rootdir):]
1788 fixpaths = pycompat.ossep != '/'
1788 fixpaths = pycompat.ossep != '/'
1789 if fixpaths:
1789 if fixpaths:
1790 spec = spec.replace(pycompat.ossep, '/')
1790 spec = spec.replace(pycompat.ossep, '/')
1791 speclen = len(spec)
1791 speclen = len(spec)
1792 fullpaths = opts[r'full']
1792 fullpaths = opts[r'full']
1793 files, dirs = set(), set()
1793 files, dirs = set(), set()
1794 adddir, addfile = dirs.add, files.add
1794 adddir, addfile = dirs.add, files.add
1795 for f, st in dirstate.iteritems():
1795 for f, st in dirstate.iteritems():
1796 if f.startswith(spec) and st[0] in acceptable:
1796 if f.startswith(spec) and st[0] in acceptable:
1797 if fixpaths:
1797 if fixpaths:
1798 f = f.replace('/', pycompat.ossep)
1798 f = f.replace('/', pycompat.ossep)
1799 if fullpaths:
1799 if fullpaths:
1800 addfile(f)
1800 addfile(f)
1801 continue
1801 continue
1802 s = f.find(pycompat.ossep, speclen)
1802 s = f.find(pycompat.ossep, speclen)
1803 if s >= 0:
1803 if s >= 0:
1804 adddir(f[:s])
1804 adddir(f[:s])
1805 else:
1805 else:
1806 addfile(f)
1806 addfile(f)
1807 return files, dirs
1807 return files, dirs
1808
1808
1809 acceptable = ''
1809 acceptable = ''
1810 if opts[r'normal']:
1810 if opts[r'normal']:
1811 acceptable += 'nm'
1811 acceptable += 'nm'
1812 if opts[r'added']:
1812 if opts[r'added']:
1813 acceptable += 'a'
1813 acceptable += 'a'
1814 if opts[r'removed']:
1814 if opts[r'removed']:
1815 acceptable += 'r'
1815 acceptable += 'r'
1816 cwd = repo.getcwd()
1816 cwd = repo.getcwd()
1817 if not specs:
1817 if not specs:
1818 specs = ['.']
1818 specs = ['.']
1819
1819
1820 files, dirs = set(), set()
1820 files, dirs = set(), set()
1821 for spec in specs:
1821 for spec in specs:
1822 f, d = complete(spec, acceptable or 'nmar')
1822 f, d = complete(spec, acceptable or 'nmar')
1823 files.update(f)
1823 files.update(f)
1824 dirs.update(d)
1824 dirs.update(d)
1825 files.update(dirs)
1825 files.update(dirs)
1826 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1826 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1827 ui.write('\n')
1827 ui.write('\n')
1828
1828
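# Editor's note -- an illustrative sketch (assumed names, plain '/' separator)
# of the "complete only up to the next path segment" rule that complete()
# above implements with f.find(pycompat.ossep, speclen).
def _nextsegment_sketch(path, spec):
    if not path.startswith(spec):
        return None
    s = path.find('/', len(spec))
    return path if s < 0 else path[:s]

# _nextsegment_sketch('src/main/app.py', 'src/') -> 'src/main'
# _nextsegment_sketch('README', '')              -> 'README'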
1829 @command('debugpeer', [], _('PATH'), norepo=True)
1829 @command('debugpeer', [], _('PATH'), norepo=True)
1830 def debugpeer(ui, path):
1830 def debugpeer(ui, path):
1831 """establish a connection to a peer repository"""
1831 """establish a connection to a peer repository"""
1832 # Always enable peer request logging. Requires --debug to display
1832 # Always enable peer request logging. Requires --debug to display
1833 # it, though.
1833 # it, though.
1834 overrides = {
1834 overrides = {
1835 ('devel', 'debug.peer-request'): True,
1835 ('devel', 'debug.peer-request'): True,
1836 }
1836 }
1837
1837
1838 with ui.configoverride(overrides):
1838 with ui.configoverride(overrides):
1839 peer = hg.peer(ui, {}, path)
1839 peer = hg.peer(ui, {}, path)
1840
1840
1841 local = peer.local() is not None
1841 local = peer.local() is not None
1842 canpush = peer.canpush()
1842 canpush = peer.canpush()
1843
1843
1844 ui.write(_('url: %s\n') % peer.url())
1844 ui.write(_('url: %s\n') % peer.url())
1845 ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
1845 ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
1846 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
1846 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
1847
1847
1848 @command('debugpickmergetool',
1848 @command('debugpickmergetool',
1849 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1849 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1850 ('', 'changedelete', None, _('emulate merging change and delete')),
1850 ('', 'changedelete', None, _('emulate merging change and delete')),
1851 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1851 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1852 _('[PATTERN]...'),
1852 _('[PATTERN]...'),
1853 inferrepo=True)
1853 inferrepo=True)
1854 def debugpickmergetool(ui, repo, *pats, **opts):
1854 def debugpickmergetool(ui, repo, *pats, **opts):
1855 """examine which merge tool is chosen for specified file
1855 """examine which merge tool is chosen for specified file
1856
1856
1857 As described in :hg:`help merge-tools`, Mercurial examines
1857 As described in :hg:`help merge-tools`, Mercurial examines
1858 configurations below in this order to decide which merge tool is
1858 configurations below in this order to decide which merge tool is
1859 chosen for the specified file.
1859 chosen for the specified file.
1860
1860
1861 1. ``--tool`` option
1861 1. ``--tool`` option
1862 2. ``HGMERGE`` environment variable
1862 2. ``HGMERGE`` environment variable
1863 3. configurations in ``merge-patterns`` section
1863 3. configurations in ``merge-patterns`` section
1864 4. configuration of ``ui.merge``
1864 4. configuration of ``ui.merge``
1865 5. configurations in ``merge-tools`` section
1865 5. configurations in ``merge-tools`` section
1866 6. ``hgmerge`` tool (for historical reasons only)
1866 6. ``hgmerge`` tool (for historical reasons only)
1867 7. default tool for fallback (``:merge`` or ``:prompt``)
1867 7. default tool for fallback (``:merge`` or ``:prompt``)
1868
1868
1869 This command writes out the examination result in the style below::
1869 This command writes out the examination result in the style below::
1870
1870
1871 FILE = MERGETOOL
1871 FILE = MERGETOOL
1872
1872
1873 By default, all files known in the first parent context of the
1873 By default, all files known in the first parent context of the
1874 working directory are examined. Use file patterns and/or -I/-X
1874 working directory are examined. Use file patterns and/or -I/-X
1875 options to limit target files. -r/--rev is also useful to examine
1875 options to limit target files. -r/--rev is also useful to examine
1876 files in another context without actually updating to it.
1876 files in another context without actually updating to it.
1877
1877
1878 With --debug, this command shows warning messages while matching
1878 With --debug, this command shows warning messages while matching
1879 against ``merge-patterns`` and so on. It is recommended to
1879 against ``merge-patterns`` and so on. It is recommended to
1880 use this option with explicit file patterns and/or -I/-X options,
1880 use this option with explicit file patterns and/or -I/-X options,
1881 because this option increases the amount of output per file according
1881 because this option increases the amount of output per file according
1882 to the configurations in hgrc.
1882 to the configurations in hgrc.
1883
1883
1884 With -v/--verbose, this command first shows the configurations below
1884 With -v/--verbose, this command first shows the configurations below
1885 (only if they are specified).
1885 (only if they are specified).
1886
1886
1887 - ``--tool`` option
1887 - ``--tool`` option
1888 - ``HGMERGE`` environment variable
1888 - ``HGMERGE`` environment variable
1889 - configuration of ``ui.merge``
1889 - configuration of ``ui.merge``
1890
1890
1891 If the merge tool is chosen before matching against
1891 If the merge tool is chosen before matching against
1892 ``merge-patterns``, this command can't show any helpful
1892 ``merge-patterns``, this command can't show any helpful
1893 information, even with --debug. In such a case, the information above
1893 information, even with --debug. In such a case, the information above
1894 is useful for understanding why a merge tool was chosen.
1894 is useful for understanding why a merge tool was chosen.
1895 """
1895 """
1896 opts = pycompat.byteskwargs(opts)
1896 opts = pycompat.byteskwargs(opts)
1897 overrides = {}
1897 overrides = {}
1898 if opts['tool']:
1898 if opts['tool']:
1899 overrides[('ui', 'forcemerge')] = opts['tool']
1899 overrides[('ui', 'forcemerge')] = opts['tool']
1900 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1900 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1901
1901
1902 with ui.configoverride(overrides, 'debugmergepatterns'):
1902 with ui.configoverride(overrides, 'debugmergepatterns'):
1903 hgmerge = encoding.environ.get("HGMERGE")
1903 hgmerge = encoding.environ.get("HGMERGE")
1904 if hgmerge is not None:
1904 if hgmerge is not None:
1905 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1905 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1906 uimerge = ui.config("ui", "merge")
1906 uimerge = ui.config("ui", "merge")
1907 if uimerge:
1907 if uimerge:
1908 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1908 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1909
1909
1910 ctx = scmutil.revsingle(repo, opts.get('rev'))
1910 ctx = scmutil.revsingle(repo, opts.get('rev'))
1911 m = scmutil.match(ctx, pats, opts)
1911 m = scmutil.match(ctx, pats, opts)
1912 changedelete = opts['changedelete']
1912 changedelete = opts['changedelete']
1913 for path in ctx.walk(m):
1913 for path in ctx.walk(m):
1914 fctx = ctx[path]
1914 fctx = ctx[path]
1915 try:
1915 try:
1916 if not ui.debugflag:
1916 if not ui.debugflag:
1917 ui.pushbuffer(error=True)
1917 ui.pushbuffer(error=True)
1918 tool, toolpath = filemerge._picktool(repo, ui, path,
1918 tool, toolpath = filemerge._picktool(repo, ui, path,
1919 fctx.isbinary(),
1919 fctx.isbinary(),
1920 'l' in fctx.flags(),
1920 'l' in fctx.flags(),
1921 changedelete)
1921 changedelete)
1922 finally:
1922 finally:
1923 if not ui.debugflag:
1923 if not ui.debugflag:
1924 ui.popbuffer()
1924 ui.popbuffer()
1925 ui.write(('%s = %s\n') % (path, tool))
1925 ui.write(('%s = %s\n') % (path, tool))
1926
1926
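# Editor's note -- a hypothetical illustration of the "first configured source
# wins" precedence listed in the docstring above. This is NOT
# filemerge._picktool; it only mirrors the documented ordering, and every
# parameter name below is made up.
def _picktool_precedence_sketch(tool_opt=None, hgmerge_env=None,
                                pattern_match=None, ui_merge=None,
                                merge_tools_match=None):
    for candidate in (tool_opt, hgmerge_env, pattern_match, ui_merge,
                      merge_tools_match, 'hgmerge', ':prompt'):
        if candidate:
            return candidate

# _picktool_precedence_sketch(ui_merge='vimdiff') -> 'vimdiff'
# _picktool_precedence_sketch()                   -> 'hgmerge' (historical fallback)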
1927 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1927 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1928 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1928 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1929 '''access the pushkey key/value protocol
1929 '''access the pushkey key/value protocol
1930
1930
1931 With two args, list the keys in the given namespace.
1931 With two args, list the keys in the given namespace.
1932
1932
1933 With five args, set a key to new if it currently is set to old.
1933 With five args, set a key to new if it currently is set to old.
1934 Reports success or failure.
1934 Reports success or failure.
1935 '''
1935 '''
1936
1936
1937 target = hg.peer(ui, {}, repopath)
1937 target = hg.peer(ui, {}, repopath)
1938 if keyinfo:
1938 if keyinfo:
1939 key, old, new = keyinfo
1939 key, old, new = keyinfo
1940 with target.commandexecutor() as e:
1940 with target.commandexecutor() as e:
1941 r = e.callcommand('pushkey', {
1941 r = e.callcommand('pushkey', {
1942 'namespace': namespace,
1942 'namespace': namespace,
1943 'key': key,
1943 'key': key,
1944 'old': old,
1944 'old': old,
1945 'new': new,
1945 'new': new,
1946 }).result()
1946 }).result()
1947
1947
1948 ui.status(pycompat.bytestr(r) + '\n')
1948 ui.status(pycompat.bytestr(r) + '\n')
1949 return not r
1949 return not r
1950 else:
1950 else:
1951 for k, v in sorted(target.listkeys(namespace).iteritems()):
1951 for k, v in sorted(target.listkeys(namespace).iteritems()):
1952 ui.write("%s\t%s\n" % (stringutil.escapestr(k),
1952 ui.write("%s\t%s\n" % (stringutil.escapestr(k),
1953 stringutil.escapestr(v)))
1953 stringutil.escapestr(v)))
1954
1954
1955 @command('debugpvec', [], _('A B'))
1955 @command('debugpvec', [], _('A B'))
1956 def debugpvec(ui, repo, a, b=None):
1956 def debugpvec(ui, repo, a, b=None):
1957 ca = scmutil.revsingle(repo, a)
1957 ca = scmutil.revsingle(repo, a)
1958 cb = scmutil.revsingle(repo, b)
1958 cb = scmutil.revsingle(repo, b)
1959 pa = pvec.ctxpvec(ca)
1959 pa = pvec.ctxpvec(ca)
1960 pb = pvec.ctxpvec(cb)
1960 pb = pvec.ctxpvec(cb)
1961 if pa == pb:
1961 if pa == pb:
1962 rel = "="
1962 rel = "="
1963 elif pa > pb:
1963 elif pa > pb:
1964 rel = ">"
1964 rel = ">"
1965 elif pa < pb:
1965 elif pa < pb:
1966 rel = "<"
1966 rel = "<"
1967 elif pa | pb:
1967 elif pa | pb:
1968 rel = "|"
1968 rel = "|"
1969 ui.write(_("a: %s\n") % pa)
1969 ui.write(_("a: %s\n") % pa)
1970 ui.write(_("b: %s\n") % pb)
1970 ui.write(_("b: %s\n") % pb)
1971 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1971 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1972 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1972 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1973 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1973 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1974 pa.distance(pb), rel))
1974 pa.distance(pb), rel))
1975
1975
1976 @command('debugrebuilddirstate|debugrebuildstate',
1976 @command('debugrebuilddirstate|debugrebuildstate',
1977 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1977 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1978 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1978 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1979 'the working copy parent')),
1979 'the working copy parent')),
1980 ],
1980 ],
1981 _('[-r REV]'))
1981 _('[-r REV]'))
1982 def debugrebuilddirstate(ui, repo, rev, **opts):
1982 def debugrebuilddirstate(ui, repo, rev, **opts):
1983 """rebuild the dirstate as it would look like for the given revision
1983 """rebuild the dirstate as it would look like for the given revision
1984
1984
1985 If no revision is specified, the first current parent will be used.
1985 If no revision is specified, the first current parent will be used.
1986
1986
1987 The dirstate will be set to the files of the given revision.
1987 The dirstate will be set to the files of the given revision.
1988 The actual working directory content or existing dirstate
1988 The actual working directory content or existing dirstate
1989 information such as adds or removes is not considered.
1989 information such as adds or removes is not considered.
1990
1990
1991 ``minimal`` will only rebuild the dirstate status for files that claim to be
1991 ``minimal`` will only rebuild the dirstate status for files that claim to be
1992 tracked but are not in the parent manifest, or that exist in the parent
1992 tracked but are not in the parent manifest, or that exist in the parent
1993 manifest but are not in the dirstate. It will not change adds, removes, or
1993 manifest but are not in the dirstate. It will not change adds, removes, or
1994 modified files that are in the working copy parent.
1994 modified files that are in the working copy parent.
1995
1995
1996 One use of this command is to make the next :hg:`status` invocation
1996 One use of this command is to make the next :hg:`status` invocation
1997 check the actual file content.
1997 check the actual file content.
1998 """
1998 """
1999 ctx = scmutil.revsingle(repo, rev)
1999 ctx = scmutil.revsingle(repo, rev)
2000 with repo.wlock():
2000 with repo.wlock():
2001 dirstate = repo.dirstate
2001 dirstate = repo.dirstate
2002 changedfiles = None
2002 changedfiles = None
2003 # See command doc for what minimal does.
2003 # See command doc for what minimal does.
2004 if opts.get(r'minimal'):
2004 if opts.get(r'minimal'):
2005 manifestfiles = set(ctx.manifest().keys())
2005 manifestfiles = set(ctx.manifest().keys())
2006 dirstatefiles = set(dirstate)
2006 dirstatefiles = set(dirstate)
2007 manifestonly = manifestfiles - dirstatefiles
2007 manifestonly = manifestfiles - dirstatefiles
2008 dsonly = dirstatefiles - manifestfiles
2008 dsonly = dirstatefiles - manifestfiles
2009 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
2009 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
2010 changedfiles = manifestonly | dsnotadded
2010 changedfiles = manifestonly | dsnotadded
2011
2011
2012 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2012 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2013
2013
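# Editor's note -- an illustrative sketch (made-up file names and states) of
# the --minimal set arithmetic used above before calling dirstate.rebuild().
def _minimal_rebuild_sketch():
    manifestfiles = {'a.txt', 'b.txt'}
    dirstatefiles = {'b.txt', 'gone.txt', 'new.txt'}
    states = {'b.txt': 'n', 'gone.txt': 'n', 'new.txt': 'a'}
    manifestonly = manifestfiles - dirstatefiles                # {'a.txt'}
    dsonly = dirstatefiles - manifestfiles                      # {'gone.txt', 'new.txt'}
    dsnotadded = set(f for f in dsonly if states[f] != 'a')     # {'gone.txt'}
    return manifestonly | dsnotadded                            # {'a.txt', 'gone.txt'}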
2014 @command('debugrebuildfncache', [], '')
2014 @command('debugrebuildfncache', [], '')
2015 def debugrebuildfncache(ui, repo):
2015 def debugrebuildfncache(ui, repo):
2016 """rebuild the fncache file"""
2016 """rebuild the fncache file"""
2017 repair.rebuildfncache(ui, repo)
2017 repair.rebuildfncache(ui, repo)
2018
2018
2019 @command('debugrename',
2019 @command('debugrename',
2020 [('r', 'rev', '', _('revision to debug'), _('REV'))],
2020 [('r', 'rev', '', _('revision to debug'), _('REV'))],
2021 _('[-r REV] FILE'))
2021 _('[-r REV] FILE'))
2022 def debugrename(ui, repo, file1, *pats, **opts):
2022 def debugrename(ui, repo, file1, *pats, **opts):
2023 """dump rename information"""
2023 """dump rename information"""
2024
2024
2025 opts = pycompat.byteskwargs(opts)
2025 opts = pycompat.byteskwargs(opts)
2026 ctx = scmutil.revsingle(repo, opts.get('rev'))
2026 ctx = scmutil.revsingle(repo, opts.get('rev'))
2027 m = scmutil.match(ctx, (file1,) + pats, opts)
2027 m = scmutil.match(ctx, (file1,) + pats, opts)
2028 for abs in ctx.walk(m):
2028 for abs in ctx.walk(m):
2029 fctx = ctx[abs]
2029 fctx = ctx[abs]
2030 o = fctx.filelog().renamed(fctx.filenode())
2030 o = fctx.filelog().renamed(fctx.filenode())
2031 rel = m.rel(abs)
2031 rel = m.rel(abs)
2032 if o:
2032 if o:
2033 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2033 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2034 else:
2034 else:
2035 ui.write(_("%s not renamed\n") % rel)
2035 ui.write(_("%s not renamed\n") % rel)
2036
2036
2037 @command('debugrevlog', cmdutil.debugrevlogopts +
2037 @command('debugrevlog', cmdutil.debugrevlogopts +
2038 [('d', 'dump', False, _('dump index data'))],
2038 [('d', 'dump', False, _('dump index data'))],
2039 _('-c|-m|FILE'),
2039 _('-c|-m|FILE'),
2040 optionalrepo=True)
2040 optionalrepo=True)
2041 def debugrevlog(ui, repo, file_=None, **opts):
2041 def debugrevlog(ui, repo, file_=None, **opts):
2042 """show data and statistics about a revlog"""
2042 """show data and statistics about a revlog"""
2043 opts = pycompat.byteskwargs(opts)
2043 opts = pycompat.byteskwargs(opts)
2044 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
2044 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
2045
2045
2046 if opts.get("dump"):
2046 if opts.get("dump"):
2047 numrevs = len(r)
2047 numrevs = len(r)
2048 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
2048 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
2049 " rawsize totalsize compression heads chainlen\n"))
2049 " rawsize totalsize compression heads chainlen\n"))
2050 ts = 0
2050 ts = 0
2051 heads = set()
2051 heads = set()
2052
2052
2053 for rev in pycompat.xrange(numrevs):
2053 for rev in pycompat.xrange(numrevs):
2054 dbase = r.deltaparent(rev)
2054 dbase = r.deltaparent(rev)
2055 if dbase == -1:
2055 if dbase == -1:
2056 dbase = rev
2056 dbase = rev
2057 cbase = r.chainbase(rev)
2057 cbase = r.chainbase(rev)
2058 clen = r.chainlen(rev)
2058 clen = r.chainlen(rev)
2059 p1, p2 = r.parentrevs(rev)
2059 p1, p2 = r.parentrevs(rev)
2060 rs = r.rawsize(rev)
2060 rs = r.rawsize(rev)
2061 ts = ts + rs
2061 ts = ts + rs
2062 heads -= set(r.parentrevs(rev))
2062 heads -= set(r.parentrevs(rev))
2063 heads.add(rev)
2063 heads.add(rev)
2064 try:
2064 try:
2065 compression = ts / r.end(rev)
2065 compression = ts / r.end(rev)
2066 except ZeroDivisionError:
2066 except ZeroDivisionError:
2067 compression = 0
2067 compression = 0
2068 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2068 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2069 "%11d %5d %8d\n" %
2069 "%11d %5d %8d\n" %
2070 (rev, p1, p2, r.start(rev), r.end(rev),
2070 (rev, p1, p2, r.start(rev), r.end(rev),
2071 r.start(dbase), r.start(cbase),
2071 r.start(dbase), r.start(cbase),
2072 r.start(p1), r.start(p2),
2072 r.start(p1), r.start(p2),
2073 rs, ts, compression, len(heads), clen))
2073 rs, ts, compression, len(heads), clen))
2074 return 0
2074 return 0
2075
2075
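# Editor's note (illustrative): in the --dump table produced above,
# "compression" is the running ratio of cumulative uncompressed size (ts) to
# the cumulative stored size (r.end(rev)), and "heads" counts the DAG heads
# seen so far while walking the revlog in revision order.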
2076 v = r.version
2076 v = r.version
2077 format = v & 0xFFFF
2077 format = v & 0xFFFF
2078 flags = []
2078 flags = []
2079 gdelta = False
2079 gdelta = False
2080 if v & revlog.FLAG_INLINE_DATA:
2080 if v & revlog.FLAG_INLINE_DATA:
2081 flags.append('inline')
2081 flags.append('inline')
2082 if v & revlog.FLAG_GENERALDELTA:
2082 if v & revlog.FLAG_GENERALDELTA:
2083 gdelta = True
2083 gdelta = True
2084 flags.append('generaldelta')
2084 flags.append('generaldelta')
2085 if not flags:
2085 if not flags:
2086 flags = ['(none)']
2086 flags = ['(none)']
2087
2087
2088 ### tracks merge vs single parent
2088 ### tracks merge vs single parent
2089 nummerges = 0
2089 nummerges = 0
2090
2090
2091 ### tracks ways the "delta" are build
2091 ### tracks ways the "delta" are build
2092 # nodelta
2092 # nodelta
2093 numempty = 0
2093 numempty = 0
2094 numemptytext = 0
2094 numemptytext = 0
2095 numemptydelta = 0
2095 numemptydelta = 0
2096 # full file content
2096 # full file content
2097 numfull = 0
2097 numfull = 0
2098 # intermediate snapshot against a prior snapshot
2099 numsemi = 0
2098 # delta against previous revision
2100 # delta against previous revision
2099 numprev = 0
2101 numprev = 0
2100 # delta against first or second parent (not prev)
2102 # delta against first or second parent (not prev)
2101 nump1 = 0
2103 nump1 = 0
2102 nump2 = 0
2104 nump2 = 0
2103 # delta against neither prev nor parents
2105 # delta against neither prev nor parents
2104 numother = 0
2106 numother = 0
2105 # delta against prev that are also first or second parent
2107 # delta against prev that are also first or second parent
2106 # (details of `numprev`)
2108 # (details of `numprev`)
2107 nump1prev = 0
2109 nump1prev = 0
2108 nump2prev = 0
2110 nump2prev = 0
2109
2111
2110 # data about delta chain of each revs
2112 # data about delta chain of each revs
2111 chainlengths = []
2113 chainlengths = []
2112 chainbases = []
2114 chainbases = []
2113 chainspans = []
2115 chainspans = []
2114
2116
2115 # data about each revision
2117 # data about each revision
2116 datasize = [None, 0, 0]
2118 datasize = [None, 0, 0]
2117 fullsize = [None, 0, 0]
2119 fullsize = [None, 0, 0]
2120 semisize = [None, 0, 0]
2118 deltasize = [None, 0, 0]
2121 deltasize = [None, 0, 0]
2119 chunktypecounts = {}
2122 chunktypecounts = {}
2120 chunktypesizes = {}
2123 chunktypesizes = {}
2121
2124
2122 def addsize(size, l):
2125 def addsize(size, l):
2123 if l[0] is None or size < l[0]:
2126 if l[0] is None or size < l[0]:
2124 l[0] = size
2127 l[0] = size
2125 if size > l[1]:
2128 if size > l[1]:
2126 l[1] = size
2129 l[1] = size
2127 l[2] += size
2130 l[2] += size
2128
2131
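# Editor's note (illustrative): addsize() maintains a [min, max, total] triple;
# starting from [None, 0, 0] and feeding sizes 3, 1, 2 yields [1, 3, 6]. The
# totals are divided by the matching revision counts further below to produce
# the min/max/avg lines of the report.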
2129 numrevs = len(r)
2132 numrevs = len(r)
2130 for rev in pycompat.xrange(numrevs):
2133 for rev in pycompat.xrange(numrevs):
2131 p1, p2 = r.parentrevs(rev)
2134 p1, p2 = r.parentrevs(rev)
2132 delta = r.deltaparent(rev)
2135 delta = r.deltaparent(rev)
2133 if format > 0:
2136 if format > 0:
2134 addsize(r.rawsize(rev), datasize)
2137 addsize(r.rawsize(rev), datasize)
2135 if p2 != nullrev:
2138 if p2 != nullrev:
2136 nummerges += 1
2139 nummerges += 1
2137 size = r.length(rev)
2140 size = r.length(rev)
2138 if delta == nullrev:
2141 if delta == nullrev:
2139 chainlengths.append(0)
2142 chainlengths.append(0)
2140 chainbases.append(r.start(rev))
2143 chainbases.append(r.start(rev))
2141 chainspans.append(size)
2144 chainspans.append(size)
2142 if size == 0:
2145 if size == 0:
2143 numempty += 1
2146 numempty += 1
2144 numemptytext += 1
2147 numemptytext += 1
2145 else:
2148 else:
2146 numfull += 1
2149 numfull += 1
2147 addsize(size, fullsize)
2150 addsize(size, fullsize)
2148 else:
2151 else:
2149 chainlengths.append(chainlengths[delta] + 1)
2152 chainlengths.append(chainlengths[delta] + 1)
2150 baseaddr = chainbases[delta]
2153 baseaddr = chainbases[delta]
2151 revaddr = r.start(rev)
2154 revaddr = r.start(rev)
2152 chainbases.append(baseaddr)
2155 chainbases.append(baseaddr)
2153 chainspans.append((revaddr - baseaddr) + size)
2156 chainspans.append((revaddr - baseaddr) + size)
2154 if size == 0:
2157 if size == 0:
2155 numempty += 1
2158 numempty += 1
2156 numemptydelta += 1
2159 numemptydelta += 1
2160 elif r.issnapshot(rev):
2161 addsize(size, semisize)
2162 numsemi += 1
2157 else:
2163 else:
2158 addsize(size, deltasize)
2164 addsize(size, deltasize)
2159 if delta == rev - 1:
2165 if delta == rev - 1:
2160 numprev += 1
2166 numprev += 1
2161 if delta == p1:
2167 if delta == p1:
2162 nump1prev += 1
2168 nump1prev += 1
2163 elif delta == p2:
2169 elif delta == p2:
2164 nump2prev += 1
2170 nump2prev += 1
2165 elif delta == p1:
2171 elif delta == p1:
2166 nump1 += 1
2172 nump1 += 1
2167 elif delta == p2:
2173 elif delta == p2:
2168 nump2 += 1
2174 nump2 += 1
2169 elif delta != nullrev:
2175 elif delta != nullrev:
2170 numother += 1
2176 numother += 1
2171
2177
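# Editor's note (illustrative): the loop above puts every revision in exactly
# one bucket:
#   size == 0                    -> numemptytext / numemptydelta
#   no delta parent, non-empty   -> numfull (full snapshot)
#   delta parent + issnapshot()  -> numsemi (intermediate snapshot, new here)
#   otherwise                    -> numprev / nump1 / nump2 / numother,
#                                   depending on which revision the delta
#                                   is computed against.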
2172 # Obtain data on the raw chunks in the revlog.
2178 # Obtain data on the raw chunks in the revlog.
2173 if util.safehasattr(r, '_getsegmentforrevs'):
2179 if util.safehasattr(r, '_getsegmentforrevs'):
2174 segment = r._getsegmentforrevs(rev, rev)[1]
2180 segment = r._getsegmentforrevs(rev, rev)[1]
2175 else:
2181 else:
2176 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2182 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2177 if segment:
2183 if segment:
2178 chunktype = bytes(segment[0:1])
2184 chunktype = bytes(segment[0:1])
2179 else:
2185 else:
2180 chunktype = 'empty'
2186 chunktype = 'empty'
2181
2187
2182 if chunktype not in chunktypecounts:
2188 if chunktype not in chunktypecounts:
2183 chunktypecounts[chunktype] = 0
2189 chunktypecounts[chunktype] = 0
2184 chunktypesizes[chunktype] = 0
2190 chunktypesizes[chunktype] = 0
2185
2191
2186 chunktypecounts[chunktype] += 1
2192 chunktypecounts[chunktype] += 1
2187 chunktypesizes[chunktype] += size
2193 chunktypesizes[chunktype] += size
2188
2194
2189 # Adjust size min value for empty cases
2195 # Adjust size min value for empty cases
2190 for size in (datasize, fullsize, deltasize):
2196 for size in (datasize, fullsize, semisize, deltasize):
2191 if size[0] is None:
2197 if size[0] is None:
2192 size[0] = 0
2198 size[0] = 0
2193
2199
2194 numdeltas = numrevs - numfull - numempty
2200 numdeltas = numrevs - numfull - numempty - numsemi
2195 numoprev = numprev - nump1prev - nump2prev
2201 numoprev = numprev - nump1prev - nump2prev
2196 totalrawsize = datasize[2]
2202 totalrawsize = datasize[2]
2197 datasize[2] /= numrevs
2203 datasize[2] /= numrevs
2198 fulltotal = fullsize[2]
2204 fulltotal = fullsize[2]
2199 fullsize[2] /= numfull
2205 fullsize[2] /= numfull
2206 semitotal = semisize[2]
2207 if 0 < numsemi:
2208 semisize[2] /= numsemi
2200 deltatotal = deltasize[2]
2209 deltatotal = deltasize[2]
2201 if numdeltas > 0:
2210 if numdeltas > 0:
2202 deltasize[2] /= numdeltas
2211 deltasize[2] /= numdeltas
2203 totalsize = fulltotal + deltatotal
2212 totalsize = fulltotal + semitotal + deltatotal
2204 avgchainlen = sum(chainlengths) / numrevs
2213 avgchainlen = sum(chainlengths) / numrevs
2205 maxchainlen = max(chainlengths)
2214 maxchainlen = max(chainlengths)
2206 maxchainspan = max(chainspans)
2215 maxchainspan = max(chainspans)
2207 compratio = 1
2216 compratio = 1
2208 if totalsize:
2217 if totalsize:
2209 compratio = totalrawsize / totalsize
2218 compratio = totalrawsize / totalsize
2210
2219
2211 basedfmtstr = '%%%dd\n'
2220 basedfmtstr = '%%%dd\n'
2212 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2221 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2213
2222
2214 def dfmtstr(max):
2223 def dfmtstr(max):
2215 return basedfmtstr % len(str(max))
2224 return basedfmtstr % len(str(max))
2216 def pcfmtstr(max, padding=0):
2225 def pcfmtstr(max, padding=0):
2217 return basepcfmtstr % (len(str(max)), ' ' * padding)
2226 return basepcfmtstr % (len(str(max)), ' ' * padding)
2218
2227
2219 def pcfmt(value, total):
2228 def pcfmt(value, total):
2220 if total:
2229 if total:
2221 return (value, 100 * float(value) / total)
2230 return (value, 100 * float(value) / total)
2222 else:
2231 else:
2223 return value, 100.0
2232 return value, 100.0
2224
2233
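# Editor's note (illustrative): pcfmt() pairs a count with its percentage of a
# total, e.g. pcfmt(3, 12) == (3, 25.0); combined with the width-padded format
# from pcfmtstr() this renders roughly as "    3 (25.00%)" in the report below.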
2225 ui.write(('format : %d\n') % format)
2234 ui.write(('format : %d\n') % format)
2226 ui.write(('flags : %s\n') % ', '.join(flags))
2235 ui.write(('flags : %s\n') % ', '.join(flags))
2227
2236
2228 ui.write('\n')
2237 ui.write('\n')
2229 fmt = pcfmtstr(totalsize)
2238 fmt = pcfmtstr(totalsize)
2230 fmt2 = dfmtstr(totalsize)
2239 fmt2 = dfmtstr(totalsize)
2231 ui.write(('revisions : ') + fmt2 % numrevs)
2240 ui.write(('revisions : ') + fmt2 % numrevs)
2232 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2241 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2233 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2242 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2234 ui.write(('revisions : ') + fmt2 % numrevs)
2243 ui.write(('revisions : ') + fmt2 % numrevs)
2235 ui.write((' empty : ') + fmt % pcfmt(numempty, numrevs))
2244 ui.write((' empty : ') + fmt % pcfmt(numempty, numrevs))
2236 ui.write((' text : ')
2245 ui.write((' text : ')
2237 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta))
2246 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta))
2238 ui.write((' delta : ')
2247 ui.write((' delta : ')
2239 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta))
2248 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta))
2240 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
2249 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
2250 ui.write((' inter : ') + fmt % pcfmt(numsemi, numrevs))
2241 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2251 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2242 ui.write(('revision size : ') + fmt2 % totalsize)
2252 ui.write(('revision size : ') + fmt2 % totalsize)
2243 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
2253 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
2254 ui.write((' inter : ') + fmt % pcfmt(semitotal, totalsize))
2244 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2255 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2245
2256
2246 def fmtchunktype(chunktype):
2257 def fmtchunktype(chunktype):
2247 if chunktype == 'empty':
2258 if chunktype == 'empty':
2248 return ' %s : ' % chunktype
2259 return ' %s : ' % chunktype
2249 elif chunktype in pycompat.bytestr(string.ascii_letters):
2260 elif chunktype in pycompat.bytestr(string.ascii_letters):
2250 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2261 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2251 else:
2262 else:
2252 return ' 0x%s : ' % hex(chunktype)
2263 return ' 0x%s : ' % hex(chunktype)
2253
2264
2254 ui.write('\n')
2265 ui.write('\n')
2255 ui.write(('chunks : ') + fmt2 % numrevs)
2266 ui.write(('chunks : ') + fmt2 % numrevs)
2256 for chunktype in sorted(chunktypecounts):
2267 for chunktype in sorted(chunktypecounts):
2257 ui.write(fmtchunktype(chunktype))
2268 ui.write(fmtchunktype(chunktype))
2258 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2269 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2259 ui.write(('chunks size : ') + fmt2 % totalsize)
2270 ui.write(('chunks size : ') + fmt2 % totalsize)
2260 for chunktype in sorted(chunktypecounts):
2271 for chunktype in sorted(chunktypecounts):
2261 ui.write(fmtchunktype(chunktype))
2272 ui.write(fmtchunktype(chunktype))
2262 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2273 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2263
2274
2264 ui.write('\n')
2275 ui.write('\n')
2265 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2276 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2266 ui.write(('avg chain length : ') + fmt % avgchainlen)
2277 ui.write(('avg chain length : ') + fmt % avgchainlen)
2267 ui.write(('max chain length : ') + fmt % maxchainlen)
2278 ui.write(('max chain length : ') + fmt % maxchainlen)
2268 ui.write(('max chain reach : ') + fmt % maxchainspan)
2279 ui.write(('max chain reach : ') + fmt % maxchainspan)
2269 ui.write(('compression ratio : ') + fmt % compratio)
2280 ui.write(('compression ratio : ') + fmt % compratio)
2270
2281
2271 if format > 0:
2282 if format > 0:
2272 ui.write('\n')
2283 ui.write('\n')
2273 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2284 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2274 % tuple(datasize))
2285 % tuple(datasize))
2275 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2286 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2276 % tuple(fullsize))
2287 % tuple(fullsize))
2277 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2288 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2278 % tuple(deltasize))
2289 % tuple(deltasize))
2279
2290
2280 if numdeltas > 0:
2291 if numdeltas > 0:
2281 ui.write('\n')
2292 ui.write('\n')
2282 fmt = pcfmtstr(numdeltas)
2293 fmt = pcfmtstr(numdeltas)
2283 fmt2 = pcfmtstr(numdeltas, 4)
2294 fmt2 = pcfmtstr(numdeltas, 4)
2284 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2295 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2285 if numprev > 0:
2296 if numprev > 0:
2286 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2297 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2287 numprev))
2298 numprev))
2288 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2299 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2289 numprev))
2300 numprev))
2290 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2301 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2291 numprev))
2302 numprev))
2292 if gdelta:
2303 if gdelta:
2293 ui.write(('deltas against p1 : ')
2304 ui.write(('deltas against p1 : ')
2294 + fmt % pcfmt(nump1, numdeltas))
2305 + fmt % pcfmt(nump1, numdeltas))
2295 ui.write(('deltas against p2 : ')
2306 ui.write(('deltas against p2 : ')
2296 + fmt % pcfmt(nump2, numdeltas))
2307 + fmt % pcfmt(nump2, numdeltas))
2297 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2308 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2298 numdeltas))
2309 numdeltas))
2299
2310
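# Editor's note -- an illustrative re-run of the [min, max, avg] bookkeeping
# added in this change for intermediate snapshots (variable names mirror the
# ones above; the sizes are made up).
def _semisize_sketch():
    semisize = [None, 0, 0]            # [min, max, total]
    for size in (80, 120, 100):        # three hypothetical intermediate snapshots
        if semisize[0] is None or size < semisize[0]:
            semisize[0] = size
        if size > semisize[1]:
            semisize[1] = size
        semisize[2] += size
    numsemi = 3
    if 0 < numsemi:                    # same guard as above: avoid division by zero
        semisize[2] /= numsemi
    return semisize                    # -> [80, 120, 100] as min / max / avg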
2300 @command('debugrevspec',
2311 @command('debugrevspec',
2301 [('', 'optimize', None,
2312 [('', 'optimize', None,
2302 _('print parsed tree after optimizing (DEPRECATED)')),
2313 _('print parsed tree after optimizing (DEPRECATED)')),
2303 ('', 'show-revs', True, _('print list of result revisions (default)')),
2314 ('', 'show-revs', True, _('print list of result revisions (default)')),
2304 ('s', 'show-set', None, _('print internal representation of result set')),
2315 ('s', 'show-set', None, _('print internal representation of result set')),
2305 ('p', 'show-stage', [],
2316 ('p', 'show-stage', [],
2306 _('print parsed tree at the given stage'), _('NAME')),
2317 _('print parsed tree at the given stage'), _('NAME')),
2307 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2318 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2308 ('', 'verify-optimized', False, _('verify optimized result')),
2319 ('', 'verify-optimized', False, _('verify optimized result')),
2309 ],
2320 ],
2310 ('REVSPEC'))
2321 ('REVSPEC'))
2311 def debugrevspec(ui, repo, expr, **opts):
2322 def debugrevspec(ui, repo, expr, **opts):
2312 """parse and apply a revision specification
2323 """parse and apply a revision specification
2313
2324
2314 Use -p/--show-stage option to print the parsed tree at the given stages.
2325 Use -p/--show-stage option to print the parsed tree at the given stages.
2315 Use -p all to print tree at every stage.
2326 Use -p all to print tree at every stage.
2316
2327
2317 Use --no-show-revs option with -s or -p to print only the set
2328 Use --no-show-revs option with -s or -p to print only the set
2318 representation or the parsed tree respectively.
2329 representation or the parsed tree respectively.
2319
2330
2320 Use --verify-optimized to compare the optimized result with the unoptimized
2331 Use --verify-optimized to compare the optimized result with the unoptimized
2321 one. Returns 1 if the optimized result differs.
2332 one. Returns 1 if the optimized result differs.
2322 """
2333 """
2323 opts = pycompat.byteskwargs(opts)
2334 opts = pycompat.byteskwargs(opts)
2324 aliases = ui.configitems('revsetalias')
2335 aliases = ui.configitems('revsetalias')
2325 stages = [
2336 stages = [
2326 ('parsed', lambda tree: tree),
2337 ('parsed', lambda tree: tree),
2327 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2338 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2328 ui.warn)),
2339 ui.warn)),
2329 ('concatenated', revsetlang.foldconcat),
2340 ('concatenated', revsetlang.foldconcat),
2330 ('analyzed', revsetlang.analyze),
2341 ('analyzed', revsetlang.analyze),
2331 ('optimized', revsetlang.optimize),
2342 ('optimized', revsetlang.optimize),
2332 ]
2343 ]
2333 if opts['no_optimized']:
2344 if opts['no_optimized']:
2334 stages = stages[:-1]
2345 stages = stages[:-1]
2335 if opts['verify_optimized'] and opts['no_optimized']:
2346 if opts['verify_optimized'] and opts['no_optimized']:
2336 raise error.Abort(_('cannot use --verify-optimized with '
2347 raise error.Abort(_('cannot use --verify-optimized with '
2337 '--no-optimized'))
2348 '--no-optimized'))
2338 stagenames = set(n for n, f in stages)
2349 stagenames = set(n for n, f in stages)
2339
2350
2340 showalways = set()
2351 showalways = set()
2341 showchanged = set()
2352 showchanged = set()
2342 if ui.verbose and not opts['show_stage']:
2353 if ui.verbose and not opts['show_stage']:
2343 # show parsed tree by --verbose (deprecated)
2354 # show parsed tree by --verbose (deprecated)
2344 showalways.add('parsed')
2355 showalways.add('parsed')
2345 showchanged.update(['expanded', 'concatenated'])
2356 showchanged.update(['expanded', 'concatenated'])
2346 if opts['optimize']:
2357 if opts['optimize']:
2347 showalways.add('optimized')
2358 showalways.add('optimized')
2348 if opts['show_stage'] and opts['optimize']:
2359 if opts['show_stage'] and opts['optimize']:
2349 raise error.Abort(_('cannot use --optimize with --show-stage'))
2360 raise error.Abort(_('cannot use --optimize with --show-stage'))
2350 if opts['show_stage'] == ['all']:
2361 if opts['show_stage'] == ['all']:
2351 showalways.update(stagenames)
2362 showalways.update(stagenames)
2352 else:
2363 else:
2353 for n in opts['show_stage']:
2364 for n in opts['show_stage']:
2354 if n not in stagenames:
2365 if n not in stagenames:
2355 raise error.Abort(_('invalid stage name: %s') % n)
2366 raise error.Abort(_('invalid stage name: %s') % n)
2356 showalways.update(opts['show_stage'])
2367 showalways.update(opts['show_stage'])
2357
2368
2358 treebystage = {}
2369 treebystage = {}
2359 printedtree = None
2370 printedtree = None
2360 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
2371 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
2361 for n, f in stages:
2372 for n, f in stages:
2362 treebystage[n] = tree = f(tree)
2373 treebystage[n] = tree = f(tree)
2363 if n in showalways or (n in showchanged and tree != printedtree):
2374 if n in showalways or (n in showchanged and tree != printedtree):
2364 if opts['show_stage'] or n != 'parsed':
2375 if opts['show_stage'] or n != 'parsed':
2365 ui.write(("* %s:\n") % n)
2376 ui.write(("* %s:\n") % n)
2366 ui.write(revsetlang.prettyformat(tree), "\n")
2377 ui.write(revsetlang.prettyformat(tree), "\n")
2367 printedtree = tree
2378 printedtree = tree
2368
2379
2369 if opts['verify_optimized']:
2380 if opts['verify_optimized']:
2370 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2381 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2371 brevs = revset.makematcher(treebystage['optimized'])(repo)
2382 brevs = revset.makematcher(treebystage['optimized'])(repo)
2372 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2383 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2373 ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
2384 ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
2374 ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
2385 ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
2375 arevs = list(arevs)
2386 arevs = list(arevs)
2376 brevs = list(brevs)
2387 brevs = list(brevs)
2377 if arevs == brevs:
2388 if arevs == brevs:
2378 return 0
2389 return 0
2379 ui.write(('--- analyzed\n'), label='diff.file_a')
2390 ui.write(('--- analyzed\n'), label='diff.file_a')
2380 ui.write(('+++ optimized\n'), label='diff.file_b')
2391 ui.write(('+++ optimized\n'), label='diff.file_b')
2381 sm = difflib.SequenceMatcher(None, arevs, brevs)
2392 sm = difflib.SequenceMatcher(None, arevs, brevs)
2382 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2393 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2383 if tag in ('delete', 'replace'):
2394 if tag in ('delete', 'replace'):
2384 for c in arevs[alo:ahi]:
2395 for c in arevs[alo:ahi]:
2385 ui.write('-%s\n' % c, label='diff.deleted')
2396 ui.write('-%s\n' % c, label='diff.deleted')
2386 if tag in ('insert', 'replace'):
2397 if tag in ('insert', 'replace'):
2387 for c in brevs[blo:bhi]:
2398 for c in brevs[blo:bhi]:
2388 ui.write('+%s\n' % c, label='diff.inserted')
2399 ui.write('+%s\n' % c, label='diff.inserted')
2389 if tag == 'equal':
2400 if tag == 'equal':
2390 for c in arevs[alo:ahi]:
2401 for c in arevs[alo:ahi]:
2391 ui.write(' %s\n' % c)
2402 ui.write(' %s\n' % c)
2392 return 1
2403 return 1
2393
2404
2394 func = revset.makematcher(tree)
2405 func = revset.makematcher(tree)
2395 revs = func(repo)
2406 revs = func(repo)
2396 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2407 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2397 ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
2408 ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
2398 if not opts['show_revs']:
2409 if not opts['show_revs']:
2399 return
2410 return
2400 for c in revs:
2411 for c in revs:
2401 ui.write("%d\n" % c)
2412 ui.write("%d\n" % c)
2402
2413
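# Editor's note -- a toy illustration (not revsetlang) of the staged-transform
# loop above: each named stage rewrites the tree produced by the previous one,
# and the result of every stage is kept for later inspection.
def _stages_sketch():
    stages = [
        ('parsed', lambda t: t),
        ('lowered', lambda t: t.lower()),
        ('stripped', lambda t: t.strip()),
    ]
    treebystage = {}
    tree = '  Heads() AND Public()  '
    for name, f in stages:
        treebystage[name] = tree = f(tree)
    return treebystage['stripped']     # -> 'heads() and public()'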
2403 @command('debugserve', [
2414 @command('debugserve', [
2404 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2415 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2405 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2416 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2406 ('', 'logiofile', '', _('file to log server I/O to')),
2417 ('', 'logiofile', '', _('file to log server I/O to')),
2407 ], '')
2418 ], '')
2408 def debugserve(ui, repo, **opts):
2419 def debugserve(ui, repo, **opts):
2409 """run a server with advanced settings
2420 """run a server with advanced settings
2410
2421
2411 This command is similar to :hg:`serve`. It exists partially as a
2422 This command is similar to :hg:`serve`. It exists partially as a
2412 workaround to the fact that ``hg serve --stdio`` must have specific
2423 workaround to the fact that ``hg serve --stdio`` must have specific
2413 arguments for security reasons.
2424 arguments for security reasons.
2414 """
2425 """
2415 opts = pycompat.byteskwargs(opts)
2426 opts = pycompat.byteskwargs(opts)
2416
2427
2417 if not opts['sshstdio']:
2428 if not opts['sshstdio']:
2418 raise error.Abort(_('only --sshstdio is currently supported'))
2429 raise error.Abort(_('only --sshstdio is currently supported'))
2419
2430
2420 logfh = None
2431 logfh = None
2421
2432
2422 if opts['logiofd'] and opts['logiofile']:
2433 if opts['logiofd'] and opts['logiofile']:
2423 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2434 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2424
2435
2425 if opts['logiofd']:
2436 if opts['logiofd']:
2426 # Line buffered because output is line based.
2437 # Line buffered because output is line based.
2427 try:
2438 try:
2428 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2439 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2429 except OSError as e:
2440 except OSError as e:
2430 if e.errno != errno.ESPIPE:
2441 if e.errno != errno.ESPIPE:
2431 raise
2442 raise
2432 # can't seek a pipe, so `ab` mode fails on py3
2443 # can't seek a pipe, so `ab` mode fails on py3
2433 logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
2444 logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
2434 elif opts['logiofile']:
2445 elif opts['logiofile']:
2435 logfh = open(opts['logiofile'], 'ab', 1)
2446 logfh = open(opts['logiofile'], 'ab', 1)
2436
2447
2437 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2448 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2438 s.serve_forever()
2449 s.serve_forever()
2439
2450
2440 @command('debugsetparents', [], _('REV1 [REV2]'))
2451 @command('debugsetparents', [], _('REV1 [REV2]'))
2441 def debugsetparents(ui, repo, rev1, rev2=None):
2452 def debugsetparents(ui, repo, rev1, rev2=None):
2442 """manually set the parents of the current working directory
2453 """manually set the parents of the current working directory
2443
2454
2444 This is useful for writing repository conversion tools, but should
2455 This is useful for writing repository conversion tools, but should
2445 be used with care. For example, neither the working directory nor the
2456 be used with care. For example, neither the working directory nor the
2446 dirstate is updated, so file status may be incorrect after running this
2457 dirstate is updated, so file status may be incorrect after running this
2447 command.
2458 command.
2448
2459
2449 Returns 0 on success.
2460 Returns 0 on success.
2450 """
2461 """
2451
2462
2452 node1 = scmutil.revsingle(repo, rev1).node()
2463 node1 = scmutil.revsingle(repo, rev1).node()
2453 node2 = scmutil.revsingle(repo, rev2, 'null').node()
2464 node2 = scmutil.revsingle(repo, rev2, 'null').node()
2454
2465
2455 with repo.wlock():
2466 with repo.wlock():
2456 repo.setparents(node1, node2)
2467 repo.setparents(node1, node2)
2457
2468
2458 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2469 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2459 def debugssl(ui, repo, source=None, **opts):
2470 def debugssl(ui, repo, source=None, **opts):
2460 '''test a secure connection to a server
2471 '''test a secure connection to a server
2461
2472
2462 This builds the certificate chain for the server on Windows, installing the
2473 This builds the certificate chain for the server on Windows, installing the
2463 missing intermediates and trusted root via Windows Update if necessary. It
2474 missing intermediates and trusted root via Windows Update if necessary. It
2464 does nothing on other platforms.
2475 does nothing on other platforms.
2465
2476
2466 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2477 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2467 that server is used. See :hg:`help urls` for more information.
2478 that server is used. See :hg:`help urls` for more information.
2468
2479
2469 If the update succeeds, retry the original operation. Otherwise, the cause
2480 If the update succeeds, retry the original operation. Otherwise, the cause
2470 of the SSL error is likely another issue.
2481 of the SSL error is likely another issue.
2471 '''
2482 '''
2472 if not pycompat.iswindows:
2483 if not pycompat.iswindows:
2473 raise error.Abort(_('certificate chain building is only possible on '
2484 raise error.Abort(_('certificate chain building is only possible on '
2474 'Windows'))
2485 'Windows'))
2475
2486
2476 if not source:
2487 if not source:
2477 if not repo:
2488 if not repo:
2478 raise error.Abort(_("there is no Mercurial repository here, and no "
2489 raise error.Abort(_("there is no Mercurial repository here, and no "
2479 "server specified"))
2490 "server specified"))
2480 source = "default"
2491 source = "default"
2481
2492
2482 source, branches = hg.parseurl(ui.expandpath(source))
2493 source, branches = hg.parseurl(ui.expandpath(source))
2483 url = util.url(source)
2494 url = util.url(source)
2484 addr = None
2495 addr = None
2485
2496
2486 defaultport = {'https': 443, 'ssh': 22}
2497 defaultport = {'https': 443, 'ssh': 22}
2487 if url.scheme in defaultport:
2498 if url.scheme in defaultport:
2488 try:
2499 try:
2489 addr = (url.host, int(url.port or defaultport[url.scheme]))
2500 addr = (url.host, int(url.port or defaultport[url.scheme]))
2490 except ValueError:
2501 except ValueError:
2491 raise error.Abort(_("malformed port number in URL"))
2502 raise error.Abort(_("malformed port number in URL"))
2492 else:
2503 else:
2493 raise error.Abort(_("only https and ssh connections are supported"))
2504 raise error.Abort(_("only https and ssh connections are supported"))
2494
2505
2495 from . import win32
2506 from . import win32
2496
2507
2497 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2508 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2498 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2509 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2499
2510
2500 try:
2511 try:
2501 s.connect(addr)
2512 s.connect(addr)
2502 cert = s.getpeercert(True)
2513 cert = s.getpeercert(True)
2503
2514
2504 ui.status(_('checking the certificate chain for %s\n') % url.host)
2515 ui.status(_('checking the certificate chain for %s\n') % url.host)
2505
2516
2506 complete = win32.checkcertificatechain(cert, build=False)
2517 complete = win32.checkcertificatechain(cert, build=False)
2507
2518
2508 if not complete:
2519 if not complete:
2509 ui.status(_('certificate chain is incomplete, updating... '))
2520 ui.status(_('certificate chain is incomplete, updating... '))
2510
2521
2511 if not win32.checkcertificatechain(cert):
2522 if not win32.checkcertificatechain(cert):
2512 ui.status(_('failed.\n'))
2523 ui.status(_('failed.\n'))
2513 else:
2524 else:
2514 ui.status(_('done.\n'))
2525 ui.status(_('done.\n'))
2515 else:
2526 else:
2516 ui.status(_('full certificate chain is available\n'))
2527 ui.status(_('full certificate chain is available\n'))
2517 finally:
2528 finally:
2518 s.close()
2529 s.close()
2519
2530
2520 @command('debugsub',
2531 @command('debugsub',
2521 [('r', 'rev', '',
2532 [('r', 'rev', '',
2522 _('revision to check'), _('REV'))],
2533 _('revision to check'), _('REV'))],
2523 _('[-r REV] [REV]'))
2534 _('[-r REV] [REV]'))
2524 def debugsub(ui, repo, rev=None):
2535 def debugsub(ui, repo, rev=None):
2525 ctx = scmutil.revsingle(repo, rev, None)
2536 ctx = scmutil.revsingle(repo, rev, None)
2526 for k, v in sorted(ctx.substate.items()):
2537 for k, v in sorted(ctx.substate.items()):
2527 ui.write(('path %s\n') % k)
2538 ui.write(('path %s\n') % k)
2528 ui.write((' source %s\n') % v[0])
2539 ui.write((' source %s\n') % v[0])
2529 ui.write((' revision %s\n') % v[1])
2540 ui.write((' revision %s\n') % v[1])
2530
2541
2531 @command('debugsuccessorssets',
2542 @command('debugsuccessorssets',
2532 [('', 'closest', False, _('return closest successors sets only'))],
2543 [('', 'closest', False, _('return closest successors sets only'))],
2533 _('[REV]'))
2544 _('[REV]'))
2534 def debugsuccessorssets(ui, repo, *revs, **opts):
2545 def debugsuccessorssets(ui, repo, *revs, **opts):
2535 """show set of successors for revision
2546 """show set of successors for revision
2536
2547
2537 A successors set of changeset A is a consistent group of revisions that
2548 A successors set of changeset A is a consistent group of revisions that
2538 succeed A. It contains non-obsolete changesets only unless the closest
2549 succeed A. It contains non-obsolete changesets only unless the closest
2539 successors sets are requested (see --closest).
2550 successors sets are requested (see --closest).
2540
2551
2541 In most cases a changeset A has a single successors set containing a single
2552 In most cases a changeset A has a single successors set containing a single
2542 successor (changeset A replaced by A').
2553 successor (changeset A replaced by A').
2543
2554
2544 A changeset that is made obsolete with no successors is called "pruned".
2555 A changeset that is made obsolete with no successors is called "pruned".
2545 Such changesets have no successors sets at all.
2556 Such changesets have no successors sets at all.
2546
2557
2547 A changeset that has been "split" will have a successors set containing
2558 A changeset that has been "split" will have a successors set containing
2548 more than one successor.
2559 more than one successor.
2549
2560
2550 A changeset that has been rewritten in multiple different ways is called
2561 A changeset that has been rewritten in multiple different ways is called
2551 "divergent". Such changesets have multiple successor sets (each of which
2562 "divergent". Such changesets have multiple successor sets (each of which
2552 may also be split, i.e. have multiple successors).
2563 may also be split, i.e. have multiple successors).
2553
2564
2554 Results are displayed as follows::
2565 Results are displayed as follows::
2555
2566
2556 <rev1>
2567 <rev1>
2557 <successors-1A>
2568 <successors-1A>
2558 <rev2>
2569 <rev2>
2559 <successors-2A>
2570 <successors-2A>
2560 <successors-2B1> <successors-2B2> <successors-2B3>
2571 <successors-2B1> <successors-2B2> <successors-2B3>
2561
2572
2562 Here rev2 has two possible (i.e. divergent) successors sets. The first
2573 Here rev2 has two possible (i.e. divergent) successors sets. The first
2563 holds one element, whereas the second holds three (i.e. the changeset has
2574 holds one element, whereas the second holds three (i.e. the changeset has
2564 been split).
2575 been split).
2565 """
2576 """
2566 # passed to successorssets caching computation from one call to another
2577 # passed to successorssets caching computation from one call to another
2567 cache = {}
2578 cache = {}
2568 ctx2str = bytes
2579 ctx2str = bytes
2569 node2str = short
2580 node2str = short
2570 for rev in scmutil.revrange(repo, revs):
2581 for rev in scmutil.revrange(repo, revs):
2571 ctx = repo[rev]
2582 ctx = repo[rev]
2572 ui.write('%s\n'% ctx2str(ctx))
2583 ui.write('%s\n'% ctx2str(ctx))
2573 for succsset in obsutil.successorssets(repo, ctx.node(),
2584 for succsset in obsutil.successorssets(repo, ctx.node(),
2574 closest=opts[r'closest'],
2585 closest=opts[r'closest'],
2575 cache=cache):
2586 cache=cache):
2576 if succsset:
2587 if succsset:
2577 ui.write(' ')
2588 ui.write(' ')
2578 ui.write(node2str(succsset[0]))
2589 ui.write(node2str(succsset[0]))
2579 for node in succsset[1:]:
2590 for node in succsset[1:]:
2580 ui.write(' ')
2591 ui.write(' ')
2581 ui.write(node2str(node))
2592 ui.write(node2str(node))
2582 ui.write('\n')
2593 ui.write('\n')
2583
2594
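For readers who want to drive the successors-set machinery programmatically rather than through this debug command, here is a minimal sketch mirroring the loop above (the helper name and the use of ``print()`` are illustrative only, not part of Mercurial's API)::

    from mercurial import obsutil
    from mercurial.node import short

    def print_successors_sets(repo, rev, closest=False):
        # One line per successors set, short node ids separated by spaces,
        # mirroring the debugsuccessorssets loop above.
        cache = {}  # reused across calls, as in the command implementation
        ctx = repo[rev]
        for succsset in obsutil.successorssets(repo, ctx.node(),
                                               closest=closest, cache=cache):
            print(b' '.join(short(n) for n in succsset).decode('ascii'))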
2584 @command('debugtemplate',
2595 @command('debugtemplate',
2585 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2596 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2586 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2597 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2587 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2598 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2588 optionalrepo=True)
2599 optionalrepo=True)
2589 def debugtemplate(ui, repo, tmpl, **opts):
2600 def debugtemplate(ui, repo, tmpl, **opts):
2590 """parse and apply a template
2601 """parse and apply a template
2591
2602
2592 If -r/--rev is given, the template is processed as a log template and
2603 If -r/--rev is given, the template is processed as a log template and
2593 applied to the given changesets. Otherwise, it is processed as a generic
2604 applied to the given changesets. Otherwise, it is processed as a generic
2594 template.
2605 template.
2595
2606
2596 Use --verbose to print the parsed tree.
2607 Use --verbose to print the parsed tree.
2597 """
2608 """
2598 revs = None
2609 revs = None
2599 if opts[r'rev']:
2610 if opts[r'rev']:
2600 if repo is None:
2611 if repo is None:
2601 raise error.RepoError(_('there is no Mercurial repository here '
2612 raise error.RepoError(_('there is no Mercurial repository here '
2602 '(.hg not found)'))
2613 '(.hg not found)'))
2603 revs = scmutil.revrange(repo, opts[r'rev'])
2614 revs = scmutil.revrange(repo, opts[r'rev'])
2604
2615
2605 props = {}
2616 props = {}
2606 for d in opts[r'define']:
2617 for d in opts[r'define']:
2607 try:
2618 try:
2608 k, v = (e.strip() for e in d.split('=', 1))
2619 k, v = (e.strip() for e in d.split('=', 1))
2609 if not k or k == 'ui':
2620 if not k or k == 'ui':
2610 raise ValueError
2621 raise ValueError
2611 props[k] = v
2622 props[k] = v
2612 except ValueError:
2623 except ValueError:
2613 raise error.Abort(_('malformed keyword definition: %s') % d)
2624 raise error.Abort(_('malformed keyword definition: %s') % d)
2614
2625
2615 if ui.verbose:
2626 if ui.verbose:
2616 aliases = ui.configitems('templatealias')
2627 aliases = ui.configitems('templatealias')
2617 tree = templater.parse(tmpl)
2628 tree = templater.parse(tmpl)
2618 ui.note(templater.prettyformat(tree), '\n')
2629 ui.note(templater.prettyformat(tree), '\n')
2619 newtree = templater.expandaliases(tree, aliases)
2630 newtree = templater.expandaliases(tree, aliases)
2620 if newtree != tree:
2631 if newtree != tree:
2621 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2632 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2622
2633
2623 if revs is None:
2634 if revs is None:
2624 tres = formatter.templateresources(ui, repo)
2635 tres = formatter.templateresources(ui, repo)
2625 t = formatter.maketemplater(ui, tmpl, resources=tres)
2636 t = formatter.maketemplater(ui, tmpl, resources=tres)
2626 if ui.verbose:
2637 if ui.verbose:
2627 kwds, funcs = t.symbolsuseddefault()
2638 kwds, funcs = t.symbolsuseddefault()
2628 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2639 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2629 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2640 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2630 ui.write(t.renderdefault(props))
2641 ui.write(t.renderdefault(props))
2631 else:
2642 else:
2632 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2643 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2633 if ui.verbose:
2644 if ui.verbose:
2634 kwds, funcs = displayer.t.symbolsuseddefault()
2645 kwds, funcs = displayer.t.symbolsuseddefault()
2635 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2646 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2636 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2647 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2637 for r in revs:
2648 for r in revs:
2638 displayer.show(repo[r], **pycompat.strkwargs(props))
2649 displayer.show(repo[r], **pycompat.strkwargs(props))
2639 displayer.close()
2650 displayer.close()
2640
2651
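For completeness, a minimal sketch of the "no revisions" branch above, i.e. rendering a generic template string with explicitly supplied keyword properties (the helper name is illustrative)::

    from mercurial import formatter

    def render_generic_template(ui, repo, tmpl, props):
        # Same calls as the revs-is-None branch of debugtemplate above.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        return t.renderdefault(props)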
2641 @command('debuguigetpass', [
2652 @command('debuguigetpass', [
2642 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2653 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2643 ], _('[-p TEXT]'), norepo=True)
2654 ], _('[-p TEXT]'), norepo=True)
2644 def debuguigetpass(ui, prompt=''):
2655 def debuguigetpass(ui, prompt=''):
2645 """show prompt to type password"""
2656 """show prompt to type password"""
2646 r = ui.getpass(prompt)
2657 r = ui.getpass(prompt)
2647 ui.write(('response: %s\n') % r)
2658 ui.write(('response: %s\n') % r)
2648
2659
2649 @command('debuguiprompt', [
2660 @command('debuguiprompt', [
2650 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2661 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2651 ], _('[-p TEXT]'), norepo=True)
2662 ], _('[-p TEXT]'), norepo=True)
2652 def debuguiprompt(ui, prompt=''):
2663 def debuguiprompt(ui, prompt=''):
2653 """show plain prompt"""
2664 """show plain prompt"""
2654 r = ui.prompt(prompt)
2665 r = ui.prompt(prompt)
2655 ui.write(('response: %s\n') % r)
2666 ui.write(('response: %s\n') % r)
2656
2667
2657 @command('debugupdatecaches', [])
2668 @command('debugupdatecaches', [])
2658 def debugupdatecaches(ui, repo, *pats, **opts):
2669 def debugupdatecaches(ui, repo, *pats, **opts):
2659 """warm all known caches in the repository"""
2670 """warm all known caches in the repository"""
2660 with repo.wlock(), repo.lock():
2671 with repo.wlock(), repo.lock():
2661 repo.updatecaches(full=True)
2672 repo.updatecaches(full=True)
2662
2673
2663 @command('debugupgraderepo', [
2674 @command('debugupgraderepo', [
2664 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2675 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2665 ('', 'run', False, _('performs an upgrade')),
2676 ('', 'run', False, _('performs an upgrade')),
2666 ])
2677 ])
2667 def debugupgraderepo(ui, repo, run=False, optimize=None):
2678 def debugupgraderepo(ui, repo, run=False, optimize=None):
2668 """upgrade a repository to use different features
2679 """upgrade a repository to use different features
2669
2680
2670 If no arguments are specified, the repository is evaluated for upgrade
2681 If no arguments are specified, the repository is evaluated for upgrade
2671 and a list of problems and potential optimizations is printed.
2682 and a list of problems and potential optimizations is printed.
2672
2683
2673 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2684 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2674 can be influenced via additional arguments. More details will be provided
2685 can be influenced via additional arguments. More details will be provided
2675 by the command output when run without ``--run``.
2686 by the command output when run without ``--run``.
2676
2687
2677 During the upgrade, the repository will be locked and no writes will be
2688 During the upgrade, the repository will be locked and no writes will be
2678 allowed.
2689 allowed.
2679
2690
2680 At the end of the upgrade, the repository may not be readable while new
2691 At the end of the upgrade, the repository may not be readable while new
2681 repository data is swapped in. This window will be as long as it takes to
2692 repository data is swapped in. This window will be as long as it takes to
2682 rename some directories inside the ``.hg`` directory. On most machines, this
2693 rename some directories inside the ``.hg`` directory. On most machines, this
2683 should complete almost instantaneously and the chances of a consumer being
2694 should complete almost instantaneously and the chances of a consumer being
2684 unable to access the repository should be low.
2695 unable to access the repository should be low.
2685 """
2696 """
2686 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2697 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2687
2698
2688 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2699 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2689 inferrepo=True)
2700 inferrepo=True)
2690 def debugwalk(ui, repo, *pats, **opts):
2701 def debugwalk(ui, repo, *pats, **opts):
2691 """show how files match on given patterns"""
2702 """show how files match on given patterns"""
2692 opts = pycompat.byteskwargs(opts)
2703 opts = pycompat.byteskwargs(opts)
2693 m = scmutil.match(repo[None], pats, opts)
2704 m = scmutil.match(repo[None], pats, opts)
2694 if ui.verbose:
2705 if ui.verbose:
2695 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
2706 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
2696 items = list(repo[None].walk(m))
2707 items = list(repo[None].walk(m))
2697 if not items:
2708 if not items:
2698 return
2709 return
2699 f = lambda fn: fn
2710 f = lambda fn: fn
2700 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2711 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2701 f = lambda fn: util.normpath(fn)
2712 f = lambda fn: util.normpath(fn)
2702 fmt = 'f %%-%ds %%-%ds %%s' % (
2713 fmt = 'f %%-%ds %%-%ds %%s' % (
2703 max([len(abs) for abs in items]),
2714 max([len(abs) for abs in items]),
2704 max([len(m.rel(abs)) for abs in items]))
2715 max([len(m.rel(abs)) for abs in items]))
2705 for abs in items:
2716 for abs in items:
2706 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2717 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2707 ui.write("%s\n" % line.rstrip())
2718 ui.write("%s\n" % line.rstrip())
2708
2719
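The matcher/walk pairing used above is not specific to this debug command; a minimal sketch of reusing it (the helper name is illustrative; ``pats`` and ``opts`` are the usual byte-string pattern list and option dict)::

    from mercurial import scmutil

    def matched_files(repo, pats, opts):
        # Build a matcher against the working copy and walk it, exactly as
        # debugwalk does above; returns (path, relative path, exact?) tuples.
        m = scmutil.match(repo[None], pats, opts)
        return [(f, m.rel(f), m.exact(f)) for f in repo[None].walk(m)]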
2709 @command('debugwhyunstable', [], _('REV'))
2720 @command('debugwhyunstable', [], _('REV'))
2710 def debugwhyunstable(ui, repo, rev):
2721 def debugwhyunstable(ui, repo, rev):
2711 """explain instabilities of a changeset"""
2722 """explain instabilities of a changeset"""
2712 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
2723 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
2713 dnodes = ''
2724 dnodes = ''
2714 if entry.get('divergentnodes'):
2725 if entry.get('divergentnodes'):
2715 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2726 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2716 for ctx in entry['divergentnodes']) + ' '
2727 for ctx in entry['divergentnodes']) + ' '
2717 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2728 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2718 entry['reason'], entry['node']))
2729 entry['reason'], entry['node']))
2719
2730
2720 @command('debugwireargs',
2731 @command('debugwireargs',
2721 [('', 'three', '', 'three'),
2732 [('', 'three', '', 'three'),
2722 ('', 'four', '', 'four'),
2733 ('', 'four', '', 'four'),
2723 ('', 'five', '', 'five'),
2734 ('', 'five', '', 'five'),
2724 ] + cmdutil.remoteopts,
2735 ] + cmdutil.remoteopts,
2725 _('REPO [OPTIONS]... [ONE [TWO]]'),
2736 _('REPO [OPTIONS]... [ONE [TWO]]'),
2726 norepo=True)
2737 norepo=True)
2727 def debugwireargs(ui, repopath, *vals, **opts):
2738 def debugwireargs(ui, repopath, *vals, **opts):
2728 opts = pycompat.byteskwargs(opts)
2739 opts = pycompat.byteskwargs(opts)
2729 repo = hg.peer(ui, opts, repopath)
2740 repo = hg.peer(ui, opts, repopath)
2730 for opt in cmdutil.remoteopts:
2741 for opt in cmdutil.remoteopts:
2731 del opts[opt[1]]
2742 del opts[opt[1]]
2732 args = {}
2743 args = {}
2733 for k, v in opts.iteritems():
2744 for k, v in opts.iteritems():
2734 if v:
2745 if v:
2735 args[k] = v
2746 args[k] = v
2736 args = pycompat.strkwargs(args)
2747 args = pycompat.strkwargs(args)
2737 # run twice to check that we don't mess up the stream for the next command
2748 # run twice to check that we don't mess up the stream for the next command
2738 res1 = repo.debugwireargs(*vals, **args)
2749 res1 = repo.debugwireargs(*vals, **args)
2739 res2 = repo.debugwireargs(*vals, **args)
2750 res2 = repo.debugwireargs(*vals, **args)
2740 ui.write("%s\n" % res1)
2751 ui.write("%s\n" % res1)
2741 if res1 != res2:
2752 if res1 != res2:
2742 ui.warn("%s\n" % res2)
2753 ui.warn("%s\n" % res2)
2743
2754
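As background for the peer plumbing used below, a minimal sketch of opening a peer and issuing a command through the command executor interface, the same interface ``debugwireproto`` uses for its ``command`` action (the URL is a placeholder)::

    from mercurial import hg, ui as uimod

    ui = uimod.ui.load()
    peer = hg.peer(ui, {}, b'ssh://example.com/repo')  # placeholder URL
    try:
        with peer.commandexecutor() as e:
            # 'heads' is a standard wire protocol command
            heads = e.callcommand(b'heads', {}).result()
    finally:
        peer.close()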
2744 def _parsewirelangblocks(fh):
2755 def _parsewirelangblocks(fh):
2745 activeaction = None
2756 activeaction = None
2746 blocklines = []
2757 blocklines = []
2747
2758
2748 for line in fh:
2759 for line in fh:
2749 line = line.rstrip()
2760 line = line.rstrip()
2750 if not line:
2761 if not line:
2751 continue
2762 continue
2752
2763
2753 if line.startswith(b'#'):
2764 if line.startswith(b'#'):
2754 continue
2765 continue
2755
2766
2756 if not line.startswith(b' '):
2767 if not line.startswith(b' '):
2757 # New block. Flush previous one.
2768 # New block. Flush previous one.
2758 if activeaction:
2769 if activeaction:
2759 yield activeaction, blocklines
2770 yield activeaction, blocklines
2760
2771
2761 activeaction = line
2772 activeaction = line
2762 blocklines = []
2773 blocklines = []
2763 continue
2774 continue
2764
2775
2765 # Else we start with an indent.
2776 # Else we start with an indent.
2766
2777
2767 if not activeaction:
2778 if not activeaction:
2768 raise error.Abort(_('indented line outside of block'))
2779 raise error.Abort(_('indented line outside of block'))
2769
2780
2770 blocklines.append(line)
2781 blocklines.append(line)
2771
2782
2772 # Flush last block.
2783 # Flush last block.
2773 if activeaction:
2784 if activeaction:
2774 yield activeaction, blocklines
2785 yield activeaction, blocklines
2775
2786
2776 @command('debugwireproto',
2787 @command('debugwireproto',
2777 [
2788 [
2778 ('', 'localssh', False, _('start an SSH server for this repo')),
2789 ('', 'localssh', False, _('start an SSH server for this repo')),
2779 ('', 'peer', '', _('construct a specific version of the peer')),
2790 ('', 'peer', '', _('construct a specific version of the peer')),
2780 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2791 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2781 ('', 'nologhandshake', False,
2792 ('', 'nologhandshake', False,
2782 _('do not log I/O related to the peer handshake')),
2793 _('do not log I/O related to the peer handshake')),
2783 ] + cmdutil.remoteopts,
2794 ] + cmdutil.remoteopts,
2784 _('[PATH]'),
2795 _('[PATH]'),
2785 optionalrepo=True)
2796 optionalrepo=True)
2786 def debugwireproto(ui, repo, path=None, **opts):
2797 def debugwireproto(ui, repo, path=None, **opts):
2787 """send wire protocol commands to a server
2798 """send wire protocol commands to a server
2788
2799
2789 This command can be used to issue wire protocol commands to remote
2800 This command can be used to issue wire protocol commands to remote
2790 peers and to debug the raw data being exchanged.
2801 peers and to debug the raw data being exchanged.
2791
2802
2792 ``--localssh`` will start an SSH server against the current repository
2803 ``--localssh`` will start an SSH server against the current repository
2793 and connect to that. By default, the connection will perform a handshake
2804 and connect to that. By default, the connection will perform a handshake
2794 and establish an appropriate peer instance.
2805 and establish an appropriate peer instance.
2795
2806
2796 ``--peer`` can be used to bypass the handshake protocol and construct a
2807 ``--peer`` can be used to bypass the handshake protocol and construct a
2797 peer instance using the specified class type. Valid values are ``raw``,
2808 peer instance using the specified class type. Valid values are ``raw``,
2798 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2809 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2799 raw data payloads and don't support higher-level command actions.
2810 raw data payloads and don't support higher-level command actions.
2800
2811
2801 ``--noreadstderr`` can be used to disable automatic reading from stderr
2812 ``--noreadstderr`` can be used to disable automatic reading from stderr
2802 of the peer (for SSH connections only). Disabling automatic reading of
2813 of the peer (for SSH connections only). Disabling automatic reading of
2803 stderr is useful for making output more deterministic.
2814 stderr is useful for making output more deterministic.
2804
2815
2805 Commands are issued via a mini language which is specified via stdin.
2816 Commands are issued via a mini language which is specified via stdin.
2806 The language consists of individual actions to perform. An action is
2817 The language consists of individual actions to perform. An action is
2807 defined by a block. A block is defined as a line with no leading
2818 defined by a block. A block is defined as a line with no leading
2808 space followed by 0 or more lines with leading space. Blocks are
2819 space followed by 0 or more lines with leading space. Blocks are
2809 effectively a high-level command with additional metadata.
2820 effectively a high-level command with additional metadata.
2810
2821
2811 Lines beginning with ``#`` are ignored.
2822 Lines beginning with ``#`` are ignored.
2812
2823
2813 The following sections denote available actions.
2824 The following sections denote available actions.
2814
2825
2815 raw
2826 raw
2816 ---
2827 ---
2817
2828
2818 Send raw data to the server.
2829 Send raw data to the server.
2819
2830
2820 The block payload contains the raw data to send as one atomic send
2831 The block payload contains the raw data to send as one atomic send
2821 operation. The data may not actually be delivered in a single system
2832 operation. The data may not actually be delivered in a single system
2822 call: it depends on the abilities of the transport being used.
2833 call: it depends on the abilities of the transport being used.
2823
2834
2824 Each line in the block is de-indented and concatenated. Then, that
2835 Each line in the block is de-indented and concatenated. Then, that
2825 value is evaluated as a Python b'' literal. This allows the use of
2836 value is evaluated as a Python b'' literal. This allows the use of
2826 backslash escaping, etc.
2837 backslash escaping, etc.
2827
2838
2828 raw+
2839 raw+
2829 ----
2840 ----
2830
2841
2831 Behaves like ``raw`` except that it flushes output afterwards.
2842 Behaves like ``raw`` except that it flushes output afterwards.
2832
2843
2833 command <X>
2844 command <X>
2834 -----------
2845 -----------
2835
2846
2836 Send a request to run a named command, whose name follows the ``command``
2847 Send a request to run a named command, whose name follows the ``command``
2837 string.
2848 string.
2838
2849
2839 Arguments to the command are defined as lines in this block. The format of
2850 Arguments to the command are defined as lines in this block. The format of
2840 each line is ``<key> <value>``. e.g.::
2851 each line is ``<key> <value>``. e.g.::
2841
2852
2842 command listkeys
2853 command listkeys
2843 namespace bookmarks
2854 namespace bookmarks
2844
2855
2845 If the value begins with ``eval:``, it will be interpreted as a Python
2856 If the value begins with ``eval:``, it will be interpreted as a Python
2846 literal expression. Otherwise values are interpreted as Python b'' literals.
2857 literal expression. Otherwise values are interpreted as Python b'' literals.
2847 This allows sending complex types and encoding special byte sequences via
2858 This allows sending complex types and encoding special byte sequences via
2848 backslash escaping.
2859 backslash escaping.
2849
2860
2850 The following arguments have special meaning:
2861 The following arguments have special meaning:
2851
2862
2852 ``PUSHFILE``
2863 ``PUSHFILE``
2853 When defined, the *push* mechanism of the peer will be used instead
2864 When defined, the *push* mechanism of the peer will be used instead
2854 of the static request-response mechanism and the content of the
2865 of the static request-response mechanism and the content of the
2855 file specified in the value of this argument will be sent as the
2866 file specified in the value of this argument will be sent as the
2856 command payload.
2867 command payload.
2857
2868
2858 This can be used to submit a local bundle file to the remote.
2869 This can be used to submit a local bundle file to the remote.
2859
2870
2860 batchbegin
2871 batchbegin
2861 ----------
2872 ----------
2862
2873
2863 Instruct the peer to begin a batched send.
2874 Instruct the peer to begin a batched send.
2864
2875
2865 All ``command`` blocks are queued for execution until the next
2876 All ``command`` blocks are queued for execution until the next
2866 ``batchsubmit`` block.
2877 ``batchsubmit`` block.
2867
2878
2868 batchsubmit
2879 batchsubmit
2869 -----------
2880 -----------
2870
2881
2871 Submit previously queued ``command`` blocks as a batch request.
2882 Submit previously queued ``command`` blocks as a batch request.
2872
2883
2873 This action MUST be paired with a ``batchbegin`` action.
2884 This action MUST be paired with a ``batchbegin`` action.
2874
2885
2875 httprequest <method> <path>
2886 httprequest <method> <path>
2876 ---------------------------
2887 ---------------------------
2877
2888
2878 (HTTP peer only)
2889 (HTTP peer only)
2879
2890
2880 Send an HTTP request to the peer.
2891 Send an HTTP request to the peer.
2881
2892
2882 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
2893 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
2883
2894
2884 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
2895 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
2885 headers to add to the request. e.g. ``Accept: foo``.
2896 headers to add to the request. e.g. ``Accept: foo``.
2886
2897
2887 The following arguments are special:
2898 The following arguments are special:
2888
2899
2889 ``BODYFILE``
2900 ``BODYFILE``
2890 The content of the file defined as the value to this argument will be
2901 The content of the file defined as the value to this argument will be
2891 transferred verbatim as the HTTP request body.
2902 transferred verbatim as the HTTP request body.
2892
2903
2893 ``frame <type> <flags> <payload>``
2904 ``frame <type> <flags> <payload>``
2894 Send a unified protocol frame as part of the request body.
2905 Send a unified protocol frame as part of the request body.
2895
2906
2896 All frames will be collected and sent as the body to the HTTP
2907 All frames will be collected and sent as the body to the HTTP
2897 request.
2908 request.
2898
2909
2899 close
2910 close
2900 -----
2911 -----
2901
2912
2902 Close the connection to the server.
2913 Close the connection to the server.
2903
2914
2904 flush
2915 flush
2905 -----
2916 -----
2906
2917
2907 Flush data written to the server.
2918 Flush data written to the server.
2908
2919
2909 readavailable
2920 readavailable
2910 -------------
2921 -------------
2911
2922
2912 Close the write end of the connection and read all available data from
2923 Close the write end of the connection and read all available data from
2913 the server.
2924 the server.
2914
2925
2915 If the connection to the server encompasses multiple pipes, we poll both
2926 If the connection to the server encompasses multiple pipes, we poll both
2916 pipes and read available data.
2927 pipes and read available data.
2917
2928
2918 readline
2929 readline
2919 --------
2930 --------
2920
2931
2921 Read a line of output from the server. If there are multiple output
2932 Read a line of output from the server. If there are multiple output
2922 pipes, reads only the main pipe.
2933 pipes, reads only the main pipe.
2923
2934
2924 ereadline
2935 ereadline
2925 ---------
2936 ---------
2926
2937
2927 Like ``readline``, but read from the stderr pipe, if available.
2938 Like ``readline``, but read from the stderr pipe, if available.
2928
2939
2929 read <X>
2940 read <X>
2930 --------
2941 --------
2931
2942
2932 ``read()`` ``<X>`` bytes from the server's main output pipe.
2943 ``read()`` ``<X>`` bytes from the server's main output pipe.
2933
2944
2934 eread <X>
2945 eread <X>
2935 ---------
2946 ---------
2936
2947
2937 ``read()`` ``<X>`` bytes from the server's stderr pipe, if available.
2948 ``read()`` ``<X>`` bytes from the server's stderr pipe, if available.
2938
2949
2939 Specifying Unified Frame-Based Protocol Frames
2950 Specifying Unified Frame-Based Protocol Frames
2940 ----------------------------------------------
2951 ----------------------------------------------
2941
2952
2942 It is possible to emit a *Unified Frame-Based Protocol* by using special
2953 It is possible to emit a *Unified Frame-Based Protocol* by using special
2943 syntax.
2954 syntax.
2944
2955
2945 A frame is composed of a type, flags, and a payload. These can be parsed
2956 A frame is composed of a type, flags, and a payload. These can be parsed
2946 from a string of the form:
2957 from a string of the form:
2947
2958
2948 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
2959 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
2949
2960
2950 ``request-id`` and ``stream-id`` are integers defining the request and
2961 ``request-id`` and ``stream-id`` are integers defining the request and
2951 stream identifiers.
2962 stream identifiers.
2952
2963
2953 ``type`` can be an integer value for the frame type or the string name
2964 ``type`` can be an integer value for the frame type or the string name
2954 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
2965 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
2955 ``command-name``.
2966 ``command-name``.
2956
2967
2957 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
2968 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
2958 components. Each component (and there can be just one) can be an integer
2969 components. Each component (and there can be just one) can be an integer
2959 or a flag name for stream flags or frame flags, respectively. Values are
2970 or a flag name for stream flags or frame flags, respectively. Values are
2960 resolved to integers and then bitwise OR'd together.
2971 resolved to integers and then bitwise OR'd together.
2961
2972
2962 ``payload`` represents the raw frame payload. If it begins with
2973 ``payload`` represents the raw frame payload. If it begins with
2963 ``cbor:``, the following string is evaluated as Python code and the
2974 ``cbor:``, the following string is evaluated as Python code and the
2964 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
2975 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
2965 as a Python byte string literal.
2976 as a Python byte string literal.
2966 """
2977 """
2967 opts = pycompat.byteskwargs(opts)
2978 opts = pycompat.byteskwargs(opts)
2968
2979
2969 if opts['localssh'] and not repo:
2980 if opts['localssh'] and not repo:
2970 raise error.Abort(_('--localssh requires a repository'))
2981 raise error.Abort(_('--localssh requires a repository'))
2971
2982
2972 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
2983 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
2973 raise error.Abort(_('invalid value for --peer'),
2984 raise error.Abort(_('invalid value for --peer'),
2974 hint=_('valid values are "raw", "http2", "ssh1", and "ssh2"'))
2985 hint=_('valid values are "raw", "http2", "ssh1", and "ssh2"'))
2975
2986
2976 if path and opts['localssh']:
2987 if path and opts['localssh']:
2977 raise error.Abort(_('cannot specify --localssh with an explicit '
2988 raise error.Abort(_('cannot specify --localssh with an explicit '
2978 'path'))
2989 'path'))
2979
2990
2980 if ui.interactive():
2991 if ui.interactive():
2981 ui.write(_('(waiting for commands on stdin)\n'))
2992 ui.write(_('(waiting for commands on stdin)\n'))
2982
2993
2983 blocks = list(_parsewirelangblocks(ui.fin))
2994 blocks = list(_parsewirelangblocks(ui.fin))
2984
2995
2985 proc = None
2996 proc = None
2986 stdin = None
2997 stdin = None
2987 stdout = None
2998 stdout = None
2988 stderr = None
2999 stderr = None
2989 opener = None
3000 opener = None
2990
3001
2991 if opts['localssh']:
3002 if opts['localssh']:
2992 # We start the SSH server in its own process so there is process
3003 # We start the SSH server in its own process so there is process
2993 # separation. This prevents a whole class of potential bugs around
3004 # separation. This prevents a whole class of potential bugs around
2994 # shared state from interfering with server operation.
3005 # shared state from interfering with server operation.
2995 args = procutil.hgcmd() + [
3006 args = procutil.hgcmd() + [
2996 '-R', repo.root,
3007 '-R', repo.root,
2997 'debugserve', '--sshstdio',
3008 'debugserve', '--sshstdio',
2998 ]
3009 ]
2999 proc = subprocess.Popen(args, stdin=subprocess.PIPE,
3010 proc = subprocess.Popen(args, stdin=subprocess.PIPE,
3000 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
3011 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
3001 bufsize=0)
3012 bufsize=0)
3002
3013
3003 stdin = proc.stdin
3014 stdin = proc.stdin
3004 stdout = proc.stdout
3015 stdout = proc.stdout
3005 stderr = proc.stderr
3016 stderr = proc.stderr
3006
3017
3007 # We turn the pipes into observers so we can log I/O.
3018 # We turn the pipes into observers so we can log I/O.
3008 if ui.verbose or opts['peer'] == 'raw':
3019 if ui.verbose or opts['peer'] == 'raw':
3009 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
3020 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
3010 logdata=True)
3021 logdata=True)
3011 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
3022 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
3012 logdata=True)
3023 logdata=True)
3013 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
3024 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
3014 logdata=True)
3025 logdata=True)
3015
3026
3016 # --localssh also implies the peer connection settings.
3027 # --localssh also implies the peer connection settings.
3017
3028
3018 url = 'ssh://localserver'
3029 url = 'ssh://localserver'
3019 autoreadstderr = not opts['noreadstderr']
3030 autoreadstderr = not opts['noreadstderr']
3020
3031
3021 if opts['peer'] == 'ssh1':
3032 if opts['peer'] == 'ssh1':
3022 ui.write(_('creating ssh peer for wire protocol version 1\n'))
3033 ui.write(_('creating ssh peer for wire protocol version 1\n'))
3023 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
3034 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
3024 None, autoreadstderr=autoreadstderr)
3035 None, autoreadstderr=autoreadstderr)
3025 elif opts['peer'] == 'ssh2':
3036 elif opts['peer'] == 'ssh2':
3026 ui.write(_('creating ssh peer for wire protocol version 2\n'))
3037 ui.write(_('creating ssh peer for wire protocol version 2\n'))
3027 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
3038 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
3028 None, autoreadstderr=autoreadstderr)
3039 None, autoreadstderr=autoreadstderr)
3029 elif opts['peer'] == 'raw':
3040 elif opts['peer'] == 'raw':
3030 ui.write(_('using raw connection to peer\n'))
3041 ui.write(_('using raw connection to peer\n'))
3031 peer = None
3042 peer = None
3032 else:
3043 else:
3033 ui.write(_('creating ssh peer from handshake results\n'))
3044 ui.write(_('creating ssh peer from handshake results\n'))
3034 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
3045 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
3035 autoreadstderr=autoreadstderr)
3046 autoreadstderr=autoreadstderr)
3036
3047
3037 elif path:
3048 elif path:
3038 # We bypass hg.peer() so we can proxy the sockets.
3049 # We bypass hg.peer() so we can proxy the sockets.
3039 # TODO consider not doing this because we skip
3050 # TODO consider not doing this because we skip
3040 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
3051 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
3041 u = util.url(path)
3052 u = util.url(path)
3042 if u.scheme != 'http':
3053 if u.scheme != 'http':
3043 raise error.Abort(_('only http:// paths are currently supported'))
3054 raise error.Abort(_('only http:// paths are currently supported'))
3044
3055
3045 url, authinfo = u.authinfo()
3056 url, authinfo = u.authinfo()
3046 openerargs = {
3057 openerargs = {
3047 r'useragent': b'Mercurial debugwireproto',
3058 r'useragent': b'Mercurial debugwireproto',
3048 }
3059 }
3049
3060
3050 # Turn pipes/sockets into observers so we can log I/O.
3061 # Turn pipes/sockets into observers so we can log I/O.
3051 if ui.verbose:
3062 if ui.verbose:
3052 openerargs.update({
3063 openerargs.update({
3053 r'loggingfh': ui,
3064 r'loggingfh': ui,
3054 r'loggingname': b's',
3065 r'loggingname': b's',
3055 r'loggingopts': {
3066 r'loggingopts': {
3056 r'logdata': True,
3067 r'logdata': True,
3057 r'logdataapis': False,
3068 r'logdataapis': False,
3058 },
3069 },
3059 })
3070 })
3060
3071
3061 if ui.debugflag:
3072 if ui.debugflag:
3062 openerargs[r'loggingopts'][r'logdataapis'] = True
3073 openerargs[r'loggingopts'][r'logdataapis'] = True
3063
3074
3064 # Don't send default headers when in raw mode. This allows us to
3075 # Don't send default headers when in raw mode. This allows us to
3065 # bypass most of the behavior of our URL handling code so we can
3076 # bypass most of the behavior of our URL handling code so we can
3066 # have near complete control over what's sent on the wire.
3077 # have near complete control over what's sent on the wire.
3067 if opts['peer'] == 'raw':
3078 if opts['peer'] == 'raw':
3068 openerargs[r'sendaccept'] = False
3079 openerargs[r'sendaccept'] = False
3069
3080
3070 opener = urlmod.opener(ui, authinfo, **openerargs)
3081 opener = urlmod.opener(ui, authinfo, **openerargs)
3071
3082
3072 if opts['peer'] == 'http2':
3083 if opts['peer'] == 'http2':
3073 ui.write(_('creating http peer for wire protocol version 2\n'))
3084 ui.write(_('creating http peer for wire protocol version 2\n'))
3074 # We go through makepeer() because we need an API descriptor for
3085 # We go through makepeer() because we need an API descriptor for
3075 # the peer instance to be useful.
3086 # the peer instance to be useful.
3076 with ui.configoverride({
3087 with ui.configoverride({
3077 ('experimental', 'httppeer.advertise-v2'): True}):
3088 ('experimental', 'httppeer.advertise-v2'): True}):
3078 if opts['nologhandshake']:
3089 if opts['nologhandshake']:
3079 ui.pushbuffer()
3090 ui.pushbuffer()
3080
3091
3081 peer = httppeer.makepeer(ui, path, opener=opener)
3092 peer = httppeer.makepeer(ui, path, opener=opener)
3082
3093
3083 if opts['nologhandshake']:
3094 if opts['nologhandshake']:
3084 ui.popbuffer()
3095 ui.popbuffer()
3085
3096
3086 if not isinstance(peer, httppeer.httpv2peer):
3097 if not isinstance(peer, httppeer.httpv2peer):
3087 raise error.Abort(_('could not instantiate HTTP peer for '
3098 raise error.Abort(_('could not instantiate HTTP peer for '
3088 'wire protocol version 2'),
3099 'wire protocol version 2'),
3089 hint=_('the server may not have the feature '
3100 hint=_('the server may not have the feature '
3090 'enabled or is not allowing this '
3101 'enabled or is not allowing this '
3091 'client version'))
3102 'client version'))
3092
3103
3093 elif opts['peer'] == 'raw':
3104 elif opts['peer'] == 'raw':
3094 ui.write(_('using raw connection to peer\n'))
3105 ui.write(_('using raw connection to peer\n'))
3095 peer = None
3106 peer = None
3096 elif opts['peer']:
3107 elif opts['peer']:
3097 raise error.Abort(_('--peer %s not supported with HTTP peers') %
3108 raise error.Abort(_('--peer %s not supported with HTTP peers') %
3098 opts['peer'])
3109 opts['peer'])
3099 else:
3110 else:
3100 peer = httppeer.makepeer(ui, path, opener=opener)
3111 peer = httppeer.makepeer(ui, path, opener=opener)
3101
3112
3102 # We /could/ populate stdin/stdout with sock.makefile()...
3113 # We /could/ populate stdin/stdout with sock.makefile()...
3103 else:
3114 else:
3104 raise error.Abort(_('unsupported connection configuration'))
3115 raise error.Abort(_('unsupported connection configuration'))
3105
3116
3106 batchedcommands = None
3117 batchedcommands = None
3107
3118
3108 # Now perform actions based on the parsed wire language instructions.
3119 # Now perform actions based on the parsed wire language instructions.
3109 for action, lines in blocks:
3120 for action, lines in blocks:
3110 if action in ('raw', 'raw+'):
3121 if action in ('raw', 'raw+'):
3111 if not stdin:
3122 if not stdin:
3112 raise error.Abort(_('cannot call raw/raw+ on this peer'))
3123 raise error.Abort(_('cannot call raw/raw+ on this peer'))
3113
3124
3114 # Concatenate the data together.
3125 # Concatenate the data together.
3115 data = ''.join(l.lstrip() for l in lines)
3126 data = ''.join(l.lstrip() for l in lines)
3116 data = stringutil.unescapestr(data)
3127 data = stringutil.unescapestr(data)
3117 stdin.write(data)
3128 stdin.write(data)
3118
3129
3119 if action == 'raw+':
3130 if action == 'raw+':
3120 stdin.flush()
3131 stdin.flush()
3121 elif action == 'flush':
3132 elif action == 'flush':
3122 if not stdin:
3133 if not stdin:
3123 raise error.Abort(_('cannot call flush on this peer'))
3134 raise error.Abort(_('cannot call flush on this peer'))
3124 stdin.flush()
3135 stdin.flush()
3125 elif action.startswith('command'):
3136 elif action.startswith('command'):
3126 if not peer:
3137 if not peer:
3127 raise error.Abort(_('cannot send commands unless peer instance '
3138 raise error.Abort(_('cannot send commands unless peer instance '
3128 'is available'))
3139 'is available'))
3129
3140
3130 command = action.split(' ', 1)[1]
3141 command = action.split(' ', 1)[1]
3131
3142
3132 args = {}
3143 args = {}
3133 for line in lines:
3144 for line in lines:
3134 # We need to allow empty values.
3145 # We need to allow empty values.
3135 fields = line.lstrip().split(' ', 1)
3146 fields = line.lstrip().split(' ', 1)
3136 if len(fields) == 1:
3147 if len(fields) == 1:
3137 key = fields[0]
3148 key = fields[0]
3138 value = ''
3149 value = ''
3139 else:
3150 else:
3140 key, value = fields
3151 key, value = fields
3141
3152
3142 if value.startswith('eval:'):
3153 if value.startswith('eval:'):
3143 value = stringutil.evalpythonliteral(value[5:])
3154 value = stringutil.evalpythonliteral(value[5:])
3144 else:
3155 else:
3145 value = stringutil.unescapestr(value)
3156 value = stringutil.unescapestr(value)
3146
3157
3147 args[key] = value
3158 args[key] = value
3148
3159
3149 if batchedcommands is not None:
3160 if batchedcommands is not None:
3150 batchedcommands.append((command, args))
3161 batchedcommands.append((command, args))
3151 continue
3162 continue
3152
3163
3153 ui.status(_('sending %s command\n') % command)
3164 ui.status(_('sending %s command\n') % command)
3154
3165
3155 if 'PUSHFILE' in args:
3166 if 'PUSHFILE' in args:
3156 with open(args['PUSHFILE'], r'rb') as fh:
3167 with open(args['PUSHFILE'], r'rb') as fh:
3157 del args['PUSHFILE']
3168 del args['PUSHFILE']
3158 res, output = peer._callpush(command, fh,
3169 res, output = peer._callpush(command, fh,
3159 **pycompat.strkwargs(args))
3170 **pycompat.strkwargs(args))
3160 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3171 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3161 ui.status(_('remote output: %s\n') %
3172 ui.status(_('remote output: %s\n') %
3162 stringutil.escapestr(output))
3173 stringutil.escapestr(output))
3163 else:
3174 else:
3164 with peer.commandexecutor() as e:
3175 with peer.commandexecutor() as e:
3165 res = e.callcommand(command, args).result()
3176 res = e.callcommand(command, args).result()
3166
3177
3167 if isinstance(res, wireprotov2peer.commandresponse):
3178 if isinstance(res, wireprotov2peer.commandresponse):
3168 val = list(res.cborobjects())
3179 val = list(res.cborobjects())
3169 ui.status(_('response: %s\n') %
3180 ui.status(_('response: %s\n') %
3170 stringutil.pprint(val, bprefix=True))
3181 stringutil.pprint(val, bprefix=True))
3171
3182
3172 else:
3183 else:
3173 ui.status(_('response: %s\n') %
3184 ui.status(_('response: %s\n') %
3174 stringutil.pprint(res, bprefix=True))
3185 stringutil.pprint(res, bprefix=True))
3175
3186
3176 elif action == 'batchbegin':
3187 elif action == 'batchbegin':
3177 if batchedcommands is not None:
3188 if batchedcommands is not None:
3178 raise error.Abort(_('nested batchbegin not allowed'))
3189 raise error.Abort(_('nested batchbegin not allowed'))
3179
3190
3180 batchedcommands = []
3191 batchedcommands = []
3181 elif action == 'batchsubmit':
3192 elif action == 'batchsubmit':
3182 # There is a batching API we could go through. But it would be
3193 # There is a batching API we could go through. But it would be
3183 # difficult to normalize requests into function calls. It is easier
3194 # difficult to normalize requests into function calls. It is easier
3184 # to bypass this layer and normalize to commands + args.
3195 # to bypass this layer and normalize to commands + args.
3185 ui.status(_('sending batch with %d sub-commands\n') %
3196 ui.status(_('sending batch with %d sub-commands\n') %
3186 len(batchedcommands))
3197 len(batchedcommands))
3187 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3198 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3188 ui.status(_('response #%d: %s\n') %
3199 ui.status(_('response #%d: %s\n') %
3189 (i, stringutil.escapestr(chunk)))
3200 (i, stringutil.escapestr(chunk)))
3190
3201
3191 batchedcommands = None
3202 batchedcommands = None
3192
3203
3193 elif action.startswith('httprequest '):
3204 elif action.startswith('httprequest '):
3194 if not opener:
3205 if not opener:
3195 raise error.Abort(_('cannot use httprequest without an HTTP '
3206 raise error.Abort(_('cannot use httprequest without an HTTP '
3196 'peer'))
3207 'peer'))
3197
3208
3198 request = action.split(' ', 2)
3209 request = action.split(' ', 2)
3199 if len(request) != 3:
3210 if len(request) != 3:
3200 raise error.Abort(_('invalid httprequest: expected format is '
3211 raise error.Abort(_('invalid httprequest: expected format is '
3201 '"httprequest <method> <path>'))
3212 '"httprequest <method> <path>'))
3202
3213
3203 method, httppath = request[1:]
3214 method, httppath = request[1:]
3204 headers = {}
3215 headers = {}
3205 body = None
3216 body = None
3206 frames = []
3217 frames = []
3207 for line in lines:
3218 for line in lines:
3208 line = line.lstrip()
3219 line = line.lstrip()
3209 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3220 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3210 if m:
3221 if m:
3211 headers[m.group(1)] = m.group(2)
3222 headers[m.group(1)] = m.group(2)
3212 continue
3223 continue
3213
3224
3214 if line.startswith(b'BODYFILE '):
3225 if line.startswith(b'BODYFILE '):
3215 with open(line.split(b' ', 1)[1], 'rb') as fh:
3226 with open(line.split(b' ', 1)[1], 'rb') as fh:
3216 body = fh.read()
3227 body = fh.read()
3217 elif line.startswith(b'frame '):
3228 elif line.startswith(b'frame '):
3218 frame = wireprotoframing.makeframefromhumanstring(
3229 frame = wireprotoframing.makeframefromhumanstring(
3219 line[len(b'frame '):])
3230 line[len(b'frame '):])
3220
3231
3221 frames.append(frame)
3232 frames.append(frame)
3222 else:
3233 else:
3223 raise error.Abort(_('unknown argument to httprequest: %s') %
3234 raise error.Abort(_('unknown argument to httprequest: %s') %
3224 line)
3235 line)
3225
3236
3226 url = path + httppath
3237 url = path + httppath
3227
3238
3228 if frames:
3239 if frames:
3229 body = b''.join(bytes(f) for f in frames)
3240 body = b''.join(bytes(f) for f in frames)
3230
3241
3231 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3242 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3232
3243
3233 # urllib.Request insists on using has_data() as a proxy for
3244 # urllib.Request insists on using has_data() as a proxy for
3234 # determining the request method. Override that to use our
3245 # determining the request method. Override that to use our
3235 # explicitly requested method.
3246 # explicitly requested method.
3236 req.get_method = lambda: pycompat.sysstr(method)
3247 req.get_method = lambda: pycompat.sysstr(method)
3237
3248
3238 try:
3249 try:
3239 res = opener.open(req)
3250 res = opener.open(req)
3240 body = res.read()
3251 body = res.read()
3241 except util.urlerr.urlerror as e:
3252 except util.urlerr.urlerror as e:
3242 # read() method must be called, but only exists in Python 2
3253 # read() method must be called, but only exists in Python 2
3243 getattr(e, 'read', lambda: None)()
3254 getattr(e, 'read', lambda: None)()
3244 continue
3255 continue
3245
3256
3246 if res.headers.get('Content-Type') == 'application/mercurial-cbor':
3257 if res.headers.get('Content-Type') == 'application/mercurial-cbor':
3247 ui.write(_('cbor> %s\n') %
3258 ui.write(_('cbor> %s\n') %
3248 stringutil.pprint(cbor.loads(body), bprefix=True))
3259 stringutil.pprint(cbor.loads(body), bprefix=True))
3249
3260
3250 elif action == 'close':
3261 elif action == 'close':
3251 peer.close()
3262 peer.close()
3252 elif action == 'readavailable':
3263 elif action == 'readavailable':
3253 if not stdout or not stderr:
3264 if not stdout or not stderr:
3254 raise error.Abort(_('readavailable not available on this peer'))
3265 raise error.Abort(_('readavailable not available on this peer'))
3255
3266
3256 stdin.close()
3267 stdin.close()
3257 stdout.read()
3268 stdout.read()
3258 stderr.read()
3269 stderr.read()
3259
3270
3260 elif action == 'readline':
3271 elif action == 'readline':
3261 if not stdout:
3272 if not stdout:
3262 raise error.Abort(_('readline not available on this peer'))
3273 raise error.Abort(_('readline not available on this peer'))
3263 stdout.readline()
3274 stdout.readline()
3264 elif action == 'ereadline':
3275 elif action == 'ereadline':
3265 if not stderr:
3276 if not stderr:
3266 raise error.Abort(_('ereadline not available on this peer'))
3277 raise error.Abort(_('ereadline not available on this peer'))
3267 stderr.readline()
3278 stderr.readline()
3268 elif action.startswith('read '):
3279 elif action.startswith('read '):
3269 count = int(action.split(' ', 1)[1])
3280 count = int(action.split(' ', 1)[1])
3270 if not stdout:
3281 if not stdout:
3271 raise error.Abort(_('read not available on this peer'))
3282 raise error.Abort(_('read not available on this peer'))
3272 stdout.read(count)
3283 stdout.read(count)
3273 elif action.startswith('eread '):
3284 elif action.startswith('eread '):
3274 count = int(action.split(' ', 1)[1])
3285 count = int(action.split(' ', 1)[1])
3275 if not stderr:
3286 if not stderr:
3276 raise error.Abort(_('eread not available on this peer'))
3287 raise error.Abort(_('eread not available on this peer'))
3277 stderr.read(count)
3288 stderr.read(count)
3278 else:
3289 else:
3279 raise error.Abort(_('unknown action: %s') % action)
3290 raise error.Abort(_('unknown action: %s') % action)
3280
3291
3281 if batchedcommands is not None:
3292 if batchedcommands is not None:
3282 raise error.Abort(_('unclosed "batchbegin" request'))
3293 raise error.Abort(_('unclosed "batchbegin" request'))
3283
3294
3284 if peer:
3295 if peer:
3285 peer.close()
3296 peer.close()
3286
3297
3287 if proc:
3298 if proc:
3288 proc.kill()
3299 proc.kill()
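As a usage sketch, the docstring's ``listkeys`` example can be piped into this command from Python (assuming the current directory is a Mercurial repository; output depends on its contents)::

    import subprocess

    script = (b'command listkeys\n'
              b'    namespace bookmarks\n')

    proc = subprocess.run(
        ['hg', '-R', '.', 'debugwireproto', '--localssh'],
        input=script, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    print(proc.stdout.decode())  # e.g. the "sending listkeys command" status line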
@@ -1,583 +1,589 b''
1 $ cat << EOF >> $HGRCPATH
1 $ cat << EOF >> $HGRCPATH
2 > [ui]
2 > [ui]
3 > interactive=yes
3 > interactive=yes
4 > EOF
4 > EOF
5
5
6 $ hg init debugrevlog
6 $ hg init debugrevlog
7 $ cd debugrevlog
7 $ cd debugrevlog
8 $ echo a > a
8 $ echo a > a
9 $ hg ci -Am adda
9 $ hg ci -Am adda
10 adding a
10 adding a
11 $ hg rm .
11 $ hg rm .
12 removing a
12 removing a
13 $ hg ci -Am make-it-empty
13 $ hg ci -Am make-it-empty
14 $ hg revert --all -r 0
14 $ hg revert --all -r 0
15 adding a
15 adding a
16 $ hg ci -Am make-it-full
16 $ hg ci -Am make-it-full
17 #if reporevlogstore
17 #if reporevlogstore
18 $ hg debugrevlog -c
18 $ hg debugrevlog -c
19 format : 1
19 format : 1
20 flags : inline
20 flags : inline
21
21
22 revisions : 3
22 revisions : 3
23 merges : 0 ( 0.00%)
23 merges : 0 ( 0.00%)
24 normal : 3 (100.00%)
24 normal : 3 (100.00%)
25 revisions : 3
25 revisions : 3
26 empty : 0 ( 0.00%)
26 empty : 0 ( 0.00%)
27 text : 0 (100.00%)
27 text : 0 (100.00%)
28 delta : 0 (100.00%)
28 delta : 0 (100.00%)
29 full : 3 (100.00%)
29 full : 3 (100.00%)
30 inter : 0 ( 0.00%)
30 deltas : 0 ( 0.00%)
31 deltas : 0 ( 0.00%)
31 revision size : 191
32 revision size : 191
32 full : 191 (100.00%)
33 full : 191 (100.00%)
34 inter : 0 ( 0.00%)
33 deltas : 0 ( 0.00%)
35 deltas : 0 ( 0.00%)
34
36
35 chunks : 3
37 chunks : 3
36 0x75 (u) : 3 (100.00%)
38 0x75 (u) : 3 (100.00%)
37 chunks size : 191
39 chunks size : 191
38 0x75 (u) : 191 (100.00%)
40 0x75 (u) : 191 (100.00%)
39
41
40 avg chain length : 0
42 avg chain length : 0
41 max chain length : 0
43 max chain length : 0
42 max chain reach : 67
44 max chain reach : 67
43 compression ratio : 0
45 compression ratio : 0
44
46
45 uncompressed data size (min/max/avg) : 57 / 66 / 62
47 uncompressed data size (min/max/avg) : 57 / 66 / 62
46 full revision size (min/max/avg) : 58 / 67 / 63
48 full revision size (min/max/avg) : 58 / 67 / 63
47 delta size (min/max/avg) : 0 / 0 / 0
49 delta size (min/max/avg) : 0 / 0 / 0
48 $ hg debugrevlog -m
50 $ hg debugrevlog -m
49 format : 1
51 format : 1
50 flags : inline, generaldelta
52 flags : inline, generaldelta
51
53
52 revisions : 3
54 revisions : 3
53 merges : 0 ( 0.00%)
55 merges : 0 ( 0.00%)
54 normal : 3 (100.00%)
56 normal : 3 (100.00%)
55 revisions : 3
57 revisions : 3
56 empty : 1 (33.33%)
58 empty : 1 (33.33%)
57 text : 1 (100.00%)
59 text : 1 (100.00%)
58 delta : 0 ( 0.00%)
60 delta : 0 ( 0.00%)
59 full : 2 (66.67%)
61 full : 2 (66.67%)
62 inter : 0 ( 0.00%)
60 deltas : 0 ( 0.00%)
63 deltas : 0 ( 0.00%)
61 revision size : 88
64 revision size : 88
62 full : 88 (100.00%)
65 full : 88 (100.00%)
66 inter : 0 ( 0.00%)
63 deltas : 0 ( 0.00%)
67 deltas : 0 ( 0.00%)
64
68
65 chunks : 3
69 chunks : 3
66 empty : 1 (33.33%)
70 empty : 1 (33.33%)
67 0x75 (u) : 2 (66.67%)
71 0x75 (u) : 2 (66.67%)
68 chunks size : 88
72 chunks size : 88
69 empty : 0 ( 0.00%)
73 empty : 0 ( 0.00%)
70 0x75 (u) : 88 (100.00%)
74 0x75 (u) : 88 (100.00%)
71
75
72 avg chain length : 0
76 avg chain length : 0
73 max chain length : 0
77 max chain length : 0
74 max chain reach : 44
78 max chain reach : 44
75 compression ratio : 0
79 compression ratio : 0
76
80
77 uncompressed data size (min/max/avg) : 0 / 43 / 28
81 uncompressed data size (min/max/avg) : 0 / 43 / 28
78 full revision size (min/max/avg) : 44 / 44 / 44
82 full revision size (min/max/avg) : 44 / 44 / 44
79 delta size (min/max/avg) : 0 / 0 / 0
83 delta size (min/max/avg) : 0 / 0 / 0
80 $ hg debugrevlog a
84 $ hg debugrevlog a
81 format : 1
85 format : 1
82 flags : inline, generaldelta
86 flags : inline, generaldelta
83
87
84 revisions : 1
88 revisions : 1
85 merges : 0 ( 0.00%)
89 merges : 0 ( 0.00%)
86 normal : 1 (100.00%)
90 normal : 1 (100.00%)
87 revisions : 1
91 revisions : 1
88 empty : 0 ( 0.00%)
92 empty : 0 ( 0.00%)
89 text : 0 (100.00%)
93 text : 0 (100.00%)
90 delta : 0 (100.00%)
94 delta : 0 (100.00%)
91 full : 1 (100.00%)
95 full : 1 (100.00%)
96 inter : 0 ( 0.00%)
92 deltas : 0 ( 0.00%)
97 deltas : 0 ( 0.00%)
93 revision size : 3
98 revision size : 3
94 full : 3 (100.00%)
99 full : 3 (100.00%)
100 inter : 0 ( 0.00%)
95 deltas : 0 ( 0.00%)
101 deltas : 0 ( 0.00%)
96
102
97 chunks : 1
103 chunks : 1
98 0x75 (u) : 1 (100.00%)
104 0x75 (u) : 1 (100.00%)
99 chunks size : 3
105 chunks size : 3
100 0x75 (u) : 3 (100.00%)
106 0x75 (u) : 3 (100.00%)
101
107
102 avg chain length : 0
108 avg chain length : 0
103 max chain length : 0
109 max chain length : 0
104 max chain reach : 3
110 max chain reach : 3
105 compression ratio : 0
111 compression ratio : 0
106
112
107 uncompressed data size (min/max/avg) : 2 / 2 / 2
113 uncompressed data size (min/max/avg) : 2 / 2 / 2
108 full revision size (min/max/avg) : 3 / 3 / 3
114 full revision size (min/max/avg) : 3 / 3 / 3
109 delta size (min/max/avg) : 0 / 0 / 0
115 delta size (min/max/avg) : 0 / 0 / 0
110 #endif
116 #endif
111
117
112 Test debugindex, with and without the --verbose/--debug flag
118 Test debugindex, with and without the --verbose/--debug flag
113 $ hg debugindex a
119 $ hg debugindex a
114 rev linkrev nodeid p1 p2
120 rev linkrev nodeid p1 p2
115 0 0 b789fdd96dc2 000000000000 000000000000
121 0 0 b789fdd96dc2 000000000000 000000000000
116
122
117 #if no-reposimplestore
123 #if no-reposimplestore
118 $ hg --verbose debugindex a
124 $ hg --verbose debugindex a
119 rev offset length linkrev nodeid p1 p2
125 rev offset length linkrev nodeid p1 p2
120 0 0 3 0 b789fdd96dc2 000000000000 000000000000
126 0 0 3 0 b789fdd96dc2 000000000000 000000000000
121
127
122 $ hg --debug debugindex a
128 $ hg --debug debugindex a
123 rev offset length linkrev nodeid p1 p2
129 rev offset length linkrev nodeid p1 p2
124 0 0 3 0 b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000
130 0 0 3 0 b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000
125 #endif
131 #endif
126
132
127 $ hg debugindex -f 1 a
133 $ hg debugindex -f 1 a
128 rev flag size link p1 p2 nodeid
134 rev flag size link p1 p2 nodeid
129 0 0000 2 0 -1 -1 b789fdd96dc2
135 0 0000 2 0 -1 -1 b789fdd96dc2
130
136
131 #if no-reposimplestore
137 #if no-reposimplestore
132 $ hg --verbose debugindex -f 1 a
138 $ hg --verbose debugindex -f 1 a
133 rev flag offset length size link p1 p2 nodeid
139 rev flag offset length size link p1 p2 nodeid
134 0 0000 0 3 2 0 -1 -1 b789fdd96dc2
140 0 0000 0 3 2 0 -1 -1 b789fdd96dc2
135
141
136 $ hg --debug debugindex -f 1 a
142 $ hg --debug debugindex -f 1 a
137 rev flag offset length size link p1 p2 nodeid
143 rev flag offset length size link p1 p2 nodeid
138 0 0000 0 3 2 0 -1 -1 b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3
144 0 0000 0 3 2 0 -1 -1 b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3
139 #endif
145 #endif
140
146
141 debugdeltachain basic output
147 debugdeltachain basic output

#if reporevlogstore
$ hg debugdeltachain -m
rev chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio
0 1 1 -1 base 44 43 44 1.02326 44 0 0.00000
1 2 1 -1 base 0 0 0 0.00000 0 0 0.00000
2 3 1 -1 base 44 43 44 1.02326 44 0 0.00000

$ hg debugdeltachain -m -T '{rev} {chainid} {chainlen}\n'
0 1 1
1 2 1
2 3 1

$ hg debugdeltachain -m -Tjson
[
{
"chainid": 1,
"chainlen": 1,
"chainratio": 1.02325581395,
"chainsize": 44,
"compsize": 44,
"deltatype": "base",
"extradist": 0,
"extraratio": 0.0,
"lindist": 44,
"prevrev": -1,
"rev": 0,
"uncompsize": 43
},
{
"chainid": 2,
"chainlen": 1,
"chainratio": 0,
"chainsize": 0,
"compsize": 0,
"deltatype": "base",
"extradist": 0,
"extraratio": 0,
"lindist": 0,
"prevrev": -1,
"rev": 1,
"uncompsize": 0
},
{
"chainid": 3,
"chainlen": 1,
"chainratio": 1.02325581395,
"chainsize": 44,
"compsize": 44,
"deltatype": "base",
"extradist": 0,
"extraratio": 0.0,
"lindist": 44,
"prevrev": -1,
"rev": 2,
"uncompsize": 43
}
]

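The ratio column (chainratio in the JSON output) is the chain's on-disk size divided by the revision's uncompressed size, so the 1.02326 above is just 44/43: these revisions are stored uncompressed with a one-byte 'u' marker (the 0x75 chunks counted earlier), which makes the stored chunk one byte larger than the raw text. A quick check of that arithmetic (plain Python, not run by the test):

# illustration only: the ratio column is chainsize / uncompsize for the
# chain ending at that revision
chainsize, uncompsize = 44, 43
assert round(chainsize / float(uncompsize), 5) == 1.02326
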
debugdeltachain with sparse read enabled

$ cat >> $HGRCPATH <<EOF
> [experimental]
> sparse-read = True
> EOF
$ hg debugdeltachain -m
rev chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio readsize largestblk rddensity srchunks
0 1 1 -1 base 44 43 44 1.02326 44 0 0.00000 44 44 1.00000 1
1 2 1 -1 base 0 0 0 0.00000 0 0 0.00000 0 0 1.00000 1
2 3 1 -1 base 44 43 44 1.02326 44 0 0.00000 44 44 1.00000 1

$ hg debugdeltachain -m -T '{rev} {chainid} {chainlen} {readsize} {largestblock} {readdensity}\n'
0 1 1 44 44 1.0
1 2 1 0 0 1
2 3 1 44 44 1.0

$ hg debugdeltachain -m -Tjson
[
{
"chainid": 1,
"chainlen": 1,
"chainratio": 1.02325581395,
"chainsize": 44,
"compsize": 44,
"deltatype": "base",
"extradist": 0,
"extraratio": 0.0,
"largestblock": 44,
"lindist": 44,
"prevrev": -1,
"readdensity": 1.0,
"readsize": 44,
"rev": 0,
"srchunks": 1,
"uncompsize": 43
},
{
"chainid": 2,
"chainlen": 1,
"chainratio": 0,
"chainsize": 0,
"compsize": 0,
"deltatype": "base",
"extradist": 0,
"extraratio": 0,
"largestblock": 0,
"lindist": 0,
"prevrev": -1,
"readdensity": 1,
"readsize": 0,
"rev": 1,
"srchunks": 1,
"uncompsize": 0
},
{
"chainid": 3,
"chainlen": 1,
"chainratio": 1.02325581395,
"chainsize": 44,
"compsize": 44,
"deltatype": "base",
"extradist": 0,
"extraratio": 0.0,
"largestblock": 44,
"lindist": 44,
"prevrev": -1,
"readdensity": 1.0,
"readsize": 44,
"rev": 2,
"srchunks": 1,
"uncompsize": 43
}
]

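With sparse-read enabled, debugdeltachain reports four extra columns: readsize (the bytes that would be read from disk to restore the revision), largestblk (the largest contiguous block in that read), rddensity (the fraction of the read that is useful chain data), and srchunks (how many separate read chunks the chain is sliced into). In the output above every chain is a single full snapshot, so readsize equals chainsize and the density is 1.0. The test below lowers experimental.sparse-read.density-threshold to 0.50 with a zero min-gap-size, which is enough to make one chain (rev 10) read its data in two chunks. A rough sketch of the density idea, under a simplified model rather than Mercurial's actual slicing code:

# simplified sketch, not Mercurial's implementation
def read_density(chunks):
    """chunks: sorted (offset, size) pairs a delta chain needs to read."""
    useful = sum(size for _, size in chunks)
    span = chunks[-1][0] + chunks[-1][1] - chunks[0][0]
    return useful / float(span)

# when the density of one contiguous read falls below the configured
# threshold, the reader splits the span at the largest gaps and issues
# several smaller reads, which is what the srchunks column counts
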
$ printf "This test checks things.\n" >> a
$ hg ci -m a
$ hg branch other
marked working directory as branch other
(branches are permanent and global, did you want a bookmark?)
$ for i in `$TESTDIR/seq.py 5`; do
> printf "shorter ${i}" >> a
> hg ci -m "a other:$i"
> hg up -q default
> printf "for the branch default we want longer chains: ${i}" >> a
> hg ci -m "a default:$i"
> hg up -q other
> done
$ hg debugdeltachain a -T '{rev} {srchunks}\n' \
> --config experimental.sparse-read.density-threshold=0.50 \
> --config experimental.sparse-read.min-gap-size=0
0 1
1 1
2 1
3 1
4 1
5 1
6 1
7 1
8 1
9 1
10 2
11 1
$ hg --config extensions.strip= strip --no-backup -r 1
1 files updated, 0 files merged, 0 files removed, 0 files unresolved

Test max chain len
$ cat >> $HGRCPATH << EOF
> [format]
> maxchainlen=4
> EOF

$ printf "This test checks if maxchainlen config value is respected also it can serve as basic test for debugrevlog -d <file>.\n" >> a
$ hg ci -m a
$ printf "b\n" >> a
$ hg ci -m a
$ printf "c\n" >> a
$ hg ci -m a
$ printf "d\n" >> a
$ hg ci -m a
$ printf "e\n" >> a
$ hg ci -m a
$ printf "f\n" >> a
$ hg ci -m a
$ printf 'g\n' >> a
$ hg ci -m a
$ printf 'h\n' >> a
$ hg ci -m a

$ hg debugrevlog -d a
# rev p1rev p2rev start end deltastart base p1 p2 rawsize totalsize compression heads chainlen
0 -1 -1 0 ??? 0 0 0 0 ??? ???? ? 1 0 (glob)
1 0 -1 ??? ??? 0 0 0 0 ??? ???? ? 1 1 (glob)
2 1 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 2 (glob)
3 2 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 3 (glob)
4 3 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 4 (glob)
5 4 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 0 (glob)
6 5 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 1 (glob)
7 6 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 2 (glob)
8 7 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 3 (glob)
#endif
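
The chainlen column above shows the effect of format.maxchainlen=4: the chain grows 0, 1, 2, 3, 4 and then, instead of adding a fifth delta, revision 5 is stored as a new full snapshot and the count restarts at 0. A back-of-the-envelope check of that pattern (illustration only, assuming the simple reset-at-the-limit behaviour the output suggests):

# reproduce the chainlen column for 9 linear revisions with a maximum
# chain length of 4; a full revision restarts the chain at length 0
maxchainlen = 4
chainlens = []
for rev in range(9):
    prev = chainlens[-1] if chainlens else None
    chainlens.append(0 if prev is None or prev >= maxchainlen else prev + 1)
assert chainlens == [0, 1, 2, 3, 4, 0, 1, 2, 3]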

Test debuglocks command:

$ hg debuglocks
lock: free
wlock: free

* Test setting the lock

waitlock <file> waits for the file to be created; if it does not appear within a
reasonable amount of time, it prints an error message and returns 1. dolock,
defined just below it, drives hg debuglocks with the given flags: it waits for an
.hg/unlock sentinel file, removes it, and then answers 'y' on stdin so the locks
that were taken get released.
$ waitlock() {
> start=`date +%s`
> timeout=5
> while [ \( ! -f $1 \) -a \( ! -L $1 \) ]; do
> now=`date +%s`
> if [ "`expr $now - $start`" -gt $timeout ]; then
> echo "timeout: $1 was not created in $timeout seconds"
> return 1
> fi
> sleep 0.1
> done
> }
$ dolock() {
> {
> waitlock .hg/unlock
> rm -f .hg/unlock
> echo y
> } | hg debuglocks "$@" > /dev/null
> }
$ dolock -s &
$ waitlock .hg/store/lock

$ hg debuglocks
lock: user *, process * (*s) (glob)
wlock: free
[1]
$ touch .hg/unlock
$ wait
$ [ -f .hg/store/lock ] || echo "There is no lock"
There is no lock

* Test setting the wlock

$ dolock -S &
$ waitlock .hg/wlock

$ hg debuglocks
lock: free
wlock: user *, process * (*s) (glob)
[1]
$ touch .hg/unlock
$ wait
$ [ -f .hg/wlock ] || echo "There is no wlock"
There is no wlock

* Test setting both locks

$ dolock -Ss &
$ waitlock .hg/wlock && waitlock .hg/store/lock

$ hg debuglocks
lock: user *, process * (*s) (glob)
wlock: user *, process * (*s) (glob)
[2]

* Test failing to set a lock

$ hg debuglocks -s
abort: lock is already held
[255]

$ hg debuglocks -S
abort: wlock is already held
[255]

$ touch .hg/unlock
$ wait

$ hg debuglocks
lock: free
wlock: free

* Test forcing the lock

$ dolock -s &
$ waitlock .hg/store/lock

$ hg debuglocks
lock: user *, process * (*s) (glob)
wlock: free
[1]

$ hg debuglocks -L

$ hg debuglocks
lock: free
wlock: free

$ touch .hg/unlock
$ wait

* Test forcing the wlock

$ dolock -S &
$ waitlock .hg/wlock

$ hg debuglocks
lock: free
wlock: user *, process * (*s) (glob)
[1]

$ hg debuglocks -W

$ hg debuglocks
lock: free
wlock: free

$ touch .hg/unlock
$ wait

Test WdirUnsupported exception

$ hg debugdata -c ffffffffffffffffffffffffffffffffffffffff
abort: working directory revision cannot be specified
[255]

Test cache warming command

$ rm -rf .hg/cache/
$ hg debugupdatecaches --debug
updating the branch cache
$ ls -r .hg/cache/*
.hg/cache/rbc-revs-v1
.hg/cache/rbc-names-v1
.hg/cache/manifestfulltextcache
.hg/cache/branch2-served

Test debugcolor

#if no-windows
$ hg debugcolor --style --color always | egrep 'mode|style|log\.'
color mode: 'ansi'
available style:
\x1b[0;33mlog.changeset\x1b[0m: \x1b[0;33myellow\x1b[0m (esc)
#endif

$ hg debugcolor --style --color never
color mode: None
available style:

$ cd ..

Test internal debugstacktrace command

$ cat > debugstacktrace.py << EOF
> from __future__ import absolute_import
> import sys
> from mercurial import util
> def f():
>     util.debugstacktrace(f=sys.stdout)
>     g()
> def g():
>     util.dst('hello from g\\n', skip=1)
>     h()
> def h():
>     util.dst('hi ...\\nfrom h hidden in g', 1, depth=2)
> f()
> EOF
$ $PYTHON debugstacktrace.py
stacktrace at:
debugstacktrace.py:12 in * (glob)
debugstacktrace.py:5 in f
hello from g at:
debugstacktrace.py:12 in * (glob)
debugstacktrace.py:6 in f
hi ...
from h hidden in g at:
debugstacktrace.py:6 in f
debugstacktrace.py:9 in g
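
The output above shows the two knobs the helper exercises: skip drops that many innermost frames from the reported stack (so the call from g with skip=1 does not list g itself), and depth caps how many frames are printed (h's trace shows only the two nearest callers, f and g, and the module frame is cut off). A minimal sketch of how the same helper could be invoked from other code, assuming a context where mercurial.util is importable; util.dst is the short alias for util.debugstacktrace used in the script above:

# illustration only, not part of the test
from mercurial import util

def inner():
    # skip=1 hides inner() itself from the trace; depth=2 keeps only the
    # two nearest remaining callers, mirroring h() in the test script
    util.dst('checkpoint reached\n', skip=1, depth=2)

def outer():
    inner()

outer()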

Test debugcapabilities command:

$ hg debugcapabilities ./debugrevlog/
Main capabilities:
branchmap
$USUAL_BUNDLE2_CAPS$
getbundle
known
lookup
pushkey
unbundle
Bundle2 capabilities:
HG20
bookmarks
changegroup
01
02
digests
md5
sha1
sha512
error
abort
unsupportedcontent
pushraced
pushkey
hgtagsfnodes
listkeys
phases
heads
pushkey
remote-changegroup
http
https
rev-branch-cache
stream
v2

Test debugpeer

$ hg --config ui.ssh="\"$PYTHON\" \"$TESTDIR/dummyssh\"" debugpeer ssh://user@dummy/debugrevlog
url: ssh://user@dummy/debugrevlog
local: no
pushable: yes

$ hg --config ui.ssh="\"$PYTHON\" \"$TESTDIR/dummyssh\"" --debug debugpeer ssh://user@dummy/debugrevlog
running "*" "*/tests/dummyssh" 'user@dummy' 'hg -R debugrevlog serve --stdio' (glob) (no-windows !)
running "*" "*\tests/dummyssh" "user@dummy" "hg -R debugrevlog serve --stdio" (glob) (windows !)
devel-peer-request: hello+between
devel-peer-request: pairs: 81 bytes
sending hello command
sending between command
remote: 413
remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS_SERVER$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
remote: 1
devel-peer-request: protocaps
devel-peer-request: caps: * bytes (glob)
sending protocaps command
url: ssh://user@dummy/debugrevlog
local: no
pushable: yes