manifestcache: do not display data when using --add...
marmoute
r42109:1e75311d default
@@ -1,3425 +1,3426
# debugcommands.py - command processing for debug* commands
#
# Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import codecs
import collections
import difflib
import errno
import operator
import os
import random
import re
import socket
import ssl
import stat
import string
import subprocess
import sys
import time

from .i18n import _
from .node import (
    bin,
    hex,
    nullhex,
    nullid,
    nullrev,
    short,
)
from . import (
    bundle2,
    changegroup,
    cmdutil,
    color,
    context,
    copies,
    dagparser,
    encoding,
    error,
    exchange,
    extensions,
    filemerge,
    filesetlang,
    formatter,
    hg,
    httppeer,
    localrepo,
    lock as lockmod,
    logcmdutil,
    merge as mergemod,
    obsolete,
    obsutil,
    phases,
    policy,
    pvec,
    pycompat,
    registrar,
    repair,
    revlog,
    revset,
    revsetlang,
    scmutil,
    setdiscovery,
    simplemerge,
    sshpeer,
    sslutil,
    streamclone,
    templater,
    treediscovery,
    upgrade,
    url as urlmod,
    util,
    vfs as vfsmod,
    wireprotoframing,
    wireprotoserver,
    wireprotov2peer,
)
from .utils import (
    cborutil,
    dateutil,
    procutil,
    stringutil,
)

from .revlogutils import (
    deltas as deltautil
)

release = lockmod.release

command = registrar.command()

@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    if len(args) == 3:
        index, rev1, rev2 = args
        r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = r.lookup
    elif len(args) == 2:
        if not repo:
            raise error.Abort(_('there is no Mercurial repository here '
                                '(.hg not found)'))
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_('either two or three arguments required'))
    a = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write('%d:%s\n' % (r.rev(a), hex(a)))

@command('debugapplystreamclonebundle', [], 'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    f = hg.openpath(ui, fname)
    gen = exchange.readbundle(ui, f, fname)
    gen.apply(repo)

@command('debugbuilddag',
    [('m', 'mergeable-file', None, _('add single file mergeable changes')),
    ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
    ('n', 'new-file', None, _('add new file at each rev'))],
    _('[OPTION]... [TEXT]'))
def debugbuilddag(ui, repo, text=None,
                  mergeable_file=False,
                  overwritten_file=False,
                  new_file=False):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_("reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_('repository is not empty'))

    # determine number of revs in DAG
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == 'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = ['%d' % i
                              for i in pycompat.xrange(0, total * linesperrev)]
        initialmergedlines.append("")

    tags = []
    progress = ui.makeprogress(_('building'), unit=_('revisions'),
                               total=total)
    with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
        at = -1
        atbranch = 'default'
        nodeids = []
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == 'n':
                ui.note(('node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = "mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [x[fn].data() for x in (pa, p1,
                                                                     p2)]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append("")
                    elif at > 0:
                        ml = p1[fn].data().split("\n")
                    else:
                        ml = initialmergedlines
                    ml[id * linesperrev] += " r%i" % id
                    mergedtext = "\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = "of"
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id

                if new_file:
                    fn = "nf%i" % id
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith("nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(repo, cx, path,
                                                  filecontent[path])
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
                                    date=(id, 0),
                                    user="debugbuilddag",
                                    extra={'branch': atbranch})
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == 'l':
                id, name = data
                ui.note(('tag %s\n' % name))
                tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == 'a':
                ui.note(('branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write("localtags", "".join(tags))

def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    indent_string = ' ' * indent
    if all:
        ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
                 % indent_string)

        def showchunks(named):
            ui.write("\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write("%s%s %s %s %s %s %d\n" %
                         (indent_string, hex(node), hex(p1), hex(p2),
                          hex(cs), hex(deltabase), len(delta)))

        chunkdata = gen.changelogheader()
        showchunks("changelog")
        chunkdata = gen.manifestheader()
        showchunks("manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata['filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_('use debugbundle2 for this file'))
        chunkdata = gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write("%s%s\n" % (indent_string, hex(node)))

def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = ' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        msg = "%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = "%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        fm = ui.formatter('debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()

def _debugphaseheads(ui, data, indent=0):
    """display version and markers contained in 'data'"""
    indent_string = ' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(indent_string)
            ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))

def _quasirepr(thing):
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        return '{%s}' % (
            b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
    return pycompat.bytestr(repr(thing))

def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_('not a bundle2 file'))
    ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
    parttypes = opts.get(r'part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = '%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        if part.type == 'changegroup':
            version = part.params.get('version', '01')
            cg = changegroup.getunbundler(version, part, 'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == 'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == 'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)

@command('debugbundle',
        [('a', 'all', None, _('show all details')),
         ('', 'part-type', [], _('show only the named part type')),
         ('', 'spec', None, _('print the bundlespec of the bundle'))],
        _('FILE'),
        norepo=True)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            spec = exchange.getbundlespec(ui, f)
            ui.write('%s\n' % spec)
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)

@command('debugcapabilities',
        [], _('PATH'),
        norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    caps = peer.capabilities()
    ui.write(('Main capabilities:\n'))
    for c in sorted(caps):
        ui.write(('  %s\n') % c)
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.write(('Bundle2 capabilities:\n'))
        for key, values in sorted(b2caps.iteritems()):
            ui.write(('  %s\n') % key)
            for v in values:
                ui.write(('    %s\n') % v)

@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        error = _(".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(error)

@command('debugcolor',
        [('', 'style', None, _('show all configured styles'))],
        'hg debugcolor')
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
    if opts.get(r'style'):
        return _debugdisplaystyle(ui)
    else:
        return _debugdisplaycolor(ui)

def _debugdisplaycolor(ui):
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for k, v in ui.configitems('color'):
            if k.startswith('color.'):
                ui._styles[k] = k[6:]
            elif k.startswith('terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_('available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(),
                   key=lambda i: ('_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(('%s\n') % colorname, label=label)

def _debugdisplaystyle(ui):
    ui.write(_('available style:\n'))
    if not ui._styles:
        return
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write('%s' % label, label=label)
        if effects:
            # 50
            ui.write(': ')
            ui.write(' ' * (max(0, width - len(label))))
            ui.write(', '.join(ui.label(e, e) for e in effects.split()))
        ui.write('\n')

@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(_('(warning: stream clone bundle will contain secret '
                  'revisions)\n'))

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))

@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
    ('b', 'branches', None, _('annotate with branch names')),
    ('', 'dots', None, _('use dots for runs')),
    ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get(r'spaces')
    dots = opts.get(r'dots')
    if file_:
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False),
                             file_)
        revs = set((int(r) for r in revs))
        def events():
            for r in rlog:
                yield 'n', (r, list(p for p in rlog.parentrevs(r)
                                    if p != -1))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        cl = repo.changelog
        tags = opts.get(r'tags')
        branches = opts.get(r'branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            b = "default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(p for p in cl.parentrevs(r)
                                    if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
        ui.write("\n")

@command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    r = cmdutil.openstorage(repo, 'debugdata', file_, opts)
    try:
        ui.write(r.revision(r.lookup(rev), raw=True))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)

@command('debugdate',
    [('e', 'extended', None, _('try extended date formats'))],
    _('[-e] DATE [RANGE]'),
    norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts[r"extended"]:
        d = dateutil.parsedate(date, util.extendeddateformats)
    else:
        d = dateutil.parsedate(date)
    ui.write(("internal: %d %d\n") % d)
    ui.write(("standard: %s\n") % dateutil.datestr(d))
    if range:
        m = dateutil.matchdate(range)
        ui.write(("match: %s\n") % m(d[0]))

@command('debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``:        revision number
    :``chainid``:    delta chain identifier (numbered by unique base)
    :``chainlen``:   delta chain length to this revision
    :``prevrev``:    previous revision in delta chain
    :``deltatype``:  role of delta / how it was computed
    :``compsize``:   compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``:  total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``:    linear distance from base revision in delta chain to end
                     of this revision
    :``extradist``:  total size of revisions not part of this delta chain from
                     base of delta chain to end of this revision; a measurement
                     of how much extra data we need to read/seek across to read
                     the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``:     total size of data read from the disk for a revision
                       (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``:  density of useful bytes in the data read from the disk
    :``srchunks``:     in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            if e[3] == e[5]:
                deltatype = 'p1'
            elif e[3] == e[6]:
                deltatype = 'p2'
            elif e[3] == rev - 1:
                deltatype = 'prev'
            elif e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            if e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    fm.plain('    rev  chain# chainlen     prev   delta       '
             'size    rawsize  chainsize     ratio   lindist extradist '
             'extraratio')
    if withsparseread:
        fm.plain('   readsize largestblk rddensity srchunks')
    fm.plain('\n')

    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write('readsize largestblock readdensity srchunks',
                     ' %10d %10d %9.5f %8d',
                     readsize, largestblock, readdensity, srchunks,
                     readsize=readsize, largestblock=largestblock,
                     readdensity=readdensity, srchunks=srchunks)

        fm.plain('\n')

    fm.end()

@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime (DEPRECATED)')),
    ('', 'dates', True, _('display the saved mtime')),
    ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = not opts[r'dates']
    if opts.get(r'nodates') is not None:
        nodates = True
    datesort = opts.get(r'datesort')

    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        if ent[3] == -1:
            timestr = 'unset               '
        elif nodates:
            timestr = 'set                 '
        else:
            timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
                                    time.localtime(ent[3]))
            timestr = encoding.strtolocal(timestr)
        if ent[1] & 0o20000:
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))

@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
    ('', 'nonheads', None,
     _('use old-style discovery with non-heads included')),
    ('', 'rev', [], 'restrict discovery to this set of revs'),
    ] + cmdutil.remoteopts,
    _('[--rev REV] [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(pushedrevs, remoteheads, remote=remote):
        if opts.get('old'):
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))

                clnode = repo.changelog.node
                common = repo.revs('heads(::%ln)', common)
                common = {clnode(r) for r in common}
        else:
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
                                                            ancestorsof=nodes)
        common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
        if lheads <= common:
            ui.write(("local is subset\n"))
        elif rheads <= common:
            ui.write(("remote is subset\n"))

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts['rev']
    doit(localrevs, remoterevs)

_chunksize = 4 << 10
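# 4 << 10 = 4096: debugdownload below streams the resource in 4 KiB chunks.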
824
824
825 @command('debugdownload',
825 @command('debugdownload',
826 [
826 [
827 ('o', 'output', '', _('path')),
827 ('o', 'output', '', _('path')),
828 ],
828 ],
829 optionalrepo=True)
829 optionalrepo=True)
830 def debugdownload(ui, repo, url, output=None, **opts):
830 def debugdownload(ui, repo, url, output=None, **opts):
831 """download a resource using Mercurial logic and config
831 """download a resource using Mercurial logic and config
832 """
832 """
833 fh = urlmod.open(ui, url, output)
833 fh = urlmod.open(ui, url, output)
834
834
835 dest = ui
835 dest = ui
836 if output:
836 if output:
837 dest = open(output, "wb", _chunksize)
837 dest = open(output, "wb", _chunksize)
838 try:
838 try:
839 data = fh.read(_chunksize)
839 data = fh.read(_chunksize)
840 while data:
840 while data:
841 dest.write(data)
841 dest.write(data)
842 data = fh.read(_chunksize)
842 data = fh.read(_chunksize)
843 finally:
843 finally:
844 if output:
844 if output:
845 dest.close()
845 dest.close()
846
846
847 @command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
847 @command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
848 def debugextensions(ui, repo, **opts):
848 def debugextensions(ui, repo, **opts):
849 '''show information about active extensions'''
849 '''show information about active extensions'''
850 opts = pycompat.byteskwargs(opts)
850 opts = pycompat.byteskwargs(opts)
851 exts = extensions.extensions(ui)
851 exts = extensions.extensions(ui)
852 hgver = util.version()
852 hgver = util.version()
853 fm = ui.formatter('debugextensions', opts)
853 fm = ui.formatter('debugextensions', opts)
854 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
854 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
855 isinternal = extensions.ismoduleinternal(extmod)
855 isinternal = extensions.ismoduleinternal(extmod)
856 extsource = pycompat.fsencode(extmod.__file__)
856 extsource = pycompat.fsencode(extmod.__file__)
857 if isinternal:
857 if isinternal:
858 exttestedwith = [] # never expose magic string to users
858 exttestedwith = [] # never expose magic string to users
859 else:
859 else:
860 exttestedwith = getattr(extmod, 'testedwith', '').split()
860 exttestedwith = getattr(extmod, 'testedwith', '').split()
861 extbuglink = getattr(extmod, 'buglink', None)
861 extbuglink = getattr(extmod, 'buglink', None)
862
862
863 fm.startitem()
863 fm.startitem()
864
864
865 if ui.quiet or ui.verbose:
865 if ui.quiet or ui.verbose:
866 fm.write('name', '%s\n', extname)
866 fm.write('name', '%s\n', extname)
867 else:
867 else:
868 fm.write('name', '%s', extname)
868 fm.write('name', '%s', extname)
869 if isinternal or hgver in exttestedwith:
869 if isinternal or hgver in exttestedwith:
870 fm.plain('\n')
870 fm.plain('\n')
871 elif not exttestedwith:
871 elif not exttestedwith:
872 fm.plain(_(' (untested!)\n'))
872 fm.plain(_(' (untested!)\n'))
873 else:
873 else:
874 lasttestedversion = exttestedwith[-1]
874 lasttestedversion = exttestedwith[-1]
875 fm.plain(' (%s!)\n' % lasttestedversion)
875 fm.plain(' (%s!)\n' % lasttestedversion)
876
876
877 fm.condwrite(ui.verbose and extsource, 'source',
877 fm.condwrite(ui.verbose and extsource, 'source',
878 _(' location: %s\n'), extsource or "")
878 _(' location: %s\n'), extsource or "")
879
879
880 if ui.verbose:
880 if ui.verbose:
881 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
881 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
882 fm.data(bundled=isinternal)
882 fm.data(bundled=isinternal)
883
883
884 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
884 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
885 _(' tested with: %s\n'),
885 _(' tested with: %s\n'),
886 fm.formatlist(exttestedwith, name='ver'))
886 fm.formatlist(exttestedwith, name='ver'))
887
887
888 fm.condwrite(ui.verbose and extbuglink, 'buglink',
888 fm.condwrite(ui.verbose and extbuglink, 'buglink',
889 _(' bug reporting: %s\n'), extbuglink or "")
889 _(' bug reporting: %s\n'), extbuglink or "")
890
890
891 fm.end()
891 fm.end()
892
892
893 @command('debugfileset',
893 @command('debugfileset',
894 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
894 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
895 ('', 'all-files', False,
895 ('', 'all-files', False,
896 _('test files from all revisions and working directory')),
896 _('test files from all revisions and working directory')),
897 ('s', 'show-matcher', None,
897 ('s', 'show-matcher', None,
898 _('print internal representation of matcher')),
898 _('print internal representation of matcher')),
899 ('p', 'show-stage', [],
899 ('p', 'show-stage', [],
900 _('print parsed tree at the given stage'), _('NAME'))],
900 _('print parsed tree at the given stage'), _('NAME'))],
901 _('[-r REV] [--all-files] [OPTION]... FILESPEC'))
901 _('[-r REV] [--all-files] [OPTION]... FILESPEC'))
902 def debugfileset(ui, repo, expr, **opts):
902 def debugfileset(ui, repo, expr, **opts):
903 '''parse and apply a fileset specification'''
903 '''parse and apply a fileset specification'''
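# For example (illustrative invocation; the fileset expression is a placeholder):
#   hg debugfileset --show-stage all 'added()'   # print the parsed, analyzed and optimized trees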
904 from . import fileset
904 from . import fileset
905 fileset.symbols # force import of fileset so we have predicates to optimize
905 fileset.symbols # force import of fileset so we have predicates to optimize
906 opts = pycompat.byteskwargs(opts)
906 opts = pycompat.byteskwargs(opts)
907 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
907 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
908
908
909 stages = [
909 stages = [
910 ('parsed', pycompat.identity),
910 ('parsed', pycompat.identity),
911 ('analyzed', filesetlang.analyze),
911 ('analyzed', filesetlang.analyze),
912 ('optimized', filesetlang.optimize),
912 ('optimized', filesetlang.optimize),
913 ]
913 ]
914 stagenames = set(n for n, f in stages)
914 stagenames = set(n for n, f in stages)
915
915
916 showalways = set()
916 showalways = set()
917 if ui.verbose and not opts['show_stage']:
917 if ui.verbose and not opts['show_stage']:
918 # show parsed tree by --verbose (deprecated)
918 # show parsed tree by --verbose (deprecated)
919 showalways.add('parsed')
919 showalways.add('parsed')
920 if opts['show_stage'] == ['all']:
920 if opts['show_stage'] == ['all']:
921 showalways.update(stagenames)
921 showalways.update(stagenames)
922 else:
922 else:
923 for n in opts['show_stage']:
923 for n in opts['show_stage']:
924 if n not in stagenames:
924 if n not in stagenames:
925 raise error.Abort(_('invalid stage name: %s') % n)
925 raise error.Abort(_('invalid stage name: %s') % n)
926 showalways.update(opts['show_stage'])
926 showalways.update(opts['show_stage'])
927
927
928 tree = filesetlang.parse(expr)
928 tree = filesetlang.parse(expr)
929 for n, f in stages:
929 for n, f in stages:
930 tree = f(tree)
930 tree = f(tree)
931 if n in showalways:
931 if n in showalways:
932 if opts['show_stage'] or n != 'parsed':
932 if opts['show_stage'] or n != 'parsed':
933 ui.write(("* %s:\n") % n)
933 ui.write(("* %s:\n") % n)
934 ui.write(filesetlang.prettyformat(tree), "\n")
934 ui.write(filesetlang.prettyformat(tree), "\n")
935
935
936 files = set()
936 files = set()
937 if opts['all_files']:
937 if opts['all_files']:
938 for r in repo:
938 for r in repo:
939 c = repo[r]
939 c = repo[r]
940 files.update(c.files())
940 files.update(c.files())
941 files.update(c.substate)
941 files.update(c.substate)
942 if opts['all_files'] or ctx.rev() is None:
942 if opts['all_files'] or ctx.rev() is None:
943 wctx = repo[None]
943 wctx = repo[None]
944 files.update(repo.dirstate.walk(scmutil.matchall(repo),
944 files.update(repo.dirstate.walk(scmutil.matchall(repo),
945 subrepos=list(wctx.substate),
945 subrepos=list(wctx.substate),
946 unknown=True, ignored=True))
946 unknown=True, ignored=True))
947 files.update(wctx.substate)
947 files.update(wctx.substate)
948 else:
948 else:
949 files.update(ctx.files())
949 files.update(ctx.files())
950 files.update(ctx.substate)
950 files.update(ctx.substate)
951
951
952 m = ctx.matchfileset(expr)
952 m = ctx.matchfileset(expr)
953 if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
953 if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
954 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
954 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
955 for f in sorted(files):
955 for f in sorted(files):
956 if not m(f):
956 if not m(f):
957 continue
957 continue
958 ui.write("%s\n" % f)
958 ui.write("%s\n" % f)
959
959
960 @command('debugformat',
960 @command('debugformat',
961 [] + cmdutil.formatteropts)
961 [] + cmdutil.formatteropts)
962 def debugformat(ui, repo, **opts):
962 def debugformat(ui, repo, **opts):
963 """display format information about the current repository
963 """display format information about the current repository
964
964
965 Use --verbose to get extra information about the current config value and
965 Use --verbose to get extra information about the current config value and
966 the Mercurial default."""
966 the Mercurial default."""
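# For example (illustrative invocation):
#   hg debugformat -v    # adds the 'config' and 'default' columns next to the repo value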
967 opts = pycompat.byteskwargs(opts)
967 opts = pycompat.byteskwargs(opts)
968 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
968 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
969 maxvariantlength = max(len('format-variant'), maxvariantlength)
969 maxvariantlength = max(len('format-variant'), maxvariantlength)
970
970
971 def makeformatname(name):
971 def makeformatname(name):
972 return '%s:' + (' ' * (maxvariantlength - len(name)))
972 return '%s:' + (' ' * (maxvariantlength - len(name)))
973
973
974 fm = ui.formatter('debugformat', opts)
974 fm = ui.formatter('debugformat', opts)
975 if fm.isplain():
975 if fm.isplain():
976 def formatvalue(value):
976 def formatvalue(value):
977 if util.safehasattr(value, 'startswith'):
977 if util.safehasattr(value, 'startswith'):
978 return value
978 return value
979 if value:
979 if value:
980 return 'yes'
980 return 'yes'
981 else:
981 else:
982 return 'no'
982 return 'no'
983 else:
983 else:
984 formatvalue = pycompat.identity
984 formatvalue = pycompat.identity
985
985
986 fm.plain('format-variant')
986 fm.plain('format-variant')
987 fm.plain(' ' * (maxvariantlength - len('format-variant')))
987 fm.plain(' ' * (maxvariantlength - len('format-variant')))
988 fm.plain(' repo')
988 fm.plain(' repo')
989 if ui.verbose:
989 if ui.verbose:
990 fm.plain(' config default')
990 fm.plain(' config default')
991 fm.plain('\n')
991 fm.plain('\n')
992 for fv in upgrade.allformatvariant:
992 for fv in upgrade.allformatvariant:
993 fm.startitem()
993 fm.startitem()
994 repovalue = fv.fromrepo(repo)
994 repovalue = fv.fromrepo(repo)
995 configvalue = fv.fromconfig(repo)
995 configvalue = fv.fromconfig(repo)
996
996
997 if repovalue != configvalue:
997 if repovalue != configvalue:
998 namelabel = 'formatvariant.name.mismatchconfig'
998 namelabel = 'formatvariant.name.mismatchconfig'
999 repolabel = 'formatvariant.repo.mismatchconfig'
999 repolabel = 'formatvariant.repo.mismatchconfig'
1000 elif repovalue != fv.default:
1000 elif repovalue != fv.default:
1001 namelabel = 'formatvariant.name.mismatchdefault'
1001 namelabel = 'formatvariant.name.mismatchdefault'
1002 repolabel = 'formatvariant.repo.mismatchdefault'
1002 repolabel = 'formatvariant.repo.mismatchdefault'
1003 else:
1003 else:
1004 namelabel = 'formatvariant.name.uptodate'
1004 namelabel = 'formatvariant.name.uptodate'
1005 repolabel = 'formatvariant.repo.uptodate'
1005 repolabel = 'formatvariant.repo.uptodate'
1006
1006
1007 fm.write('name', makeformatname(fv.name), fv.name,
1007 fm.write('name', makeformatname(fv.name), fv.name,
1008 label=namelabel)
1008 label=namelabel)
1009 fm.write('repo', ' %3s', formatvalue(repovalue),
1009 fm.write('repo', ' %3s', formatvalue(repovalue),
1010 label=repolabel)
1010 label=repolabel)
1011 if fv.default != configvalue:
1011 if fv.default != configvalue:
1012 configlabel = 'formatvariant.config.special'
1012 configlabel = 'formatvariant.config.special'
1013 else:
1013 else:
1014 configlabel = 'formatvariant.config.default'
1014 configlabel = 'formatvariant.config.default'
1015 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
1015 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
1016 label=configlabel)
1016 label=configlabel)
1017 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
1017 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
1018 label='formatvariant.default')
1018 label='formatvariant.default')
1019 fm.plain('\n')
1019 fm.plain('\n')
1020 fm.end()
1020 fm.end()
1021
1021
1022 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
1022 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
1023 def debugfsinfo(ui, path="."):
1023 def debugfsinfo(ui, path="."):
1024 """show information detected about current filesystem"""
1024 """show information detected about current filesystem"""
1025 ui.write(('path: %s\n') % path)
1025 ui.write(('path: %s\n') % path)
1026 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
1026 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
1027 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
1027 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
1028 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
1028 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
1029 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
1029 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
1030 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
1030 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
1031 casesensitive = '(unknown)'
1031 casesensitive = '(unknown)'
1032 try:
1032 try:
1033 with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
1033 with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
1034 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
1034 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
1035 except OSError:
1035 except OSError:
1036 pass
1036 pass
1037 ui.write(('case-sensitive: %s\n') % casesensitive)
1037 ui.write(('case-sensitive: %s\n') % casesensitive)
1038
1038
1039 @command('debuggetbundle',
1039 @command('debuggetbundle',
1040 [('H', 'head', [], _('id of head node'), _('ID')),
1040 [('H', 'head', [], _('id of head node'), _('ID')),
1041 ('C', 'common', [], _('id of common node'), _('ID')),
1041 ('C', 'common', [], _('id of common node'), _('ID')),
1042 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
1042 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
1043 _('REPO FILE [-H|-C ID]...'),
1043 _('REPO FILE [-H|-C ID]...'),
1044 norepo=True)
1044 norepo=True)
1045 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1045 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1046 """retrieves a bundle from a repo
1046 """retrieves a bundle from a repo
1047
1047
1048 Every ID must be a full-length hex node id string. Saves the bundle to the
1048 Every ID must be a full-length hex node id string. Saves the bundle to the
1049 given file.
1049 given file.
1050 """
1050 """
1051 opts = pycompat.byteskwargs(opts)
1051 opts = pycompat.byteskwargs(opts)
1052 repo = hg.peer(ui, opts, repopath)
1052 repo = hg.peer(ui, opts, repopath)
1053 if not repo.capable('getbundle'):
1053 if not repo.capable('getbundle'):
1054 raise error.Abort("getbundle() not supported by target repository")
1054 raise error.Abort("getbundle() not supported by target repository")
1055 args = {}
1055 args = {}
1056 if common:
1056 if common:
1057 args[r'common'] = [bin(s) for s in common]
1057 args[r'common'] = [bin(s) for s in common]
1058 if head:
1058 if head:
1059 args[r'heads'] = [bin(s) for s in head]
1059 args[r'heads'] = [bin(s) for s in head]
1060 # TODO: get desired bundlecaps from command line.
1060 # TODO: get desired bundlecaps from command line.
1061 args[r'bundlecaps'] = None
1061 args[r'bundlecaps'] = None
1062 bundle = repo.getbundle('debug', **args)
1062 bundle = repo.getbundle('debug', **args)
1063
1063
1064 bundletype = opts.get('type', 'bzip2').lower()
1064 bundletype = opts.get('type', 'bzip2').lower()
1065 btypes = {'none': 'HG10UN',
1065 btypes = {'none': 'HG10UN',
1066 'bzip2': 'HG10BZ',
1066 'bzip2': 'HG10BZ',
1067 'gzip': 'HG10GZ',
1067 'gzip': 'HG10GZ',
1068 'bundle2': 'HG20'}
1068 'bundle2': 'HG20'}
1069 bundletype = btypes.get(bundletype)
1069 bundletype = btypes.get(bundletype)
1070 if bundletype not in bundle2.bundletypes:
1070 if bundletype not in bundle2.bundletypes:
1071 raise error.Abort(_('unknown bundle type specified with --type'))
1071 raise error.Abort(_('unknown bundle type specified with --type'))
1072 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1072 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1073
1073
1074 @command('debugignore', [], '[FILE]')
1074 @command('debugignore', [], '[FILE]')
1075 def debugignore(ui, repo, *files, **opts):
1075 def debugignore(ui, repo, *files, **opts):
1076 """display the combined ignore pattern and information about ignored files
1076 """display the combined ignore pattern and information about ignored files
1077
1077
1078 With no arguments, display the combined ignore pattern.
1078 With no arguments, display the combined ignore pattern.
1079
1079
1080 Given space-separated file names, show whether each given file is ignored
1080 Given space-separated file names, show whether each given file is ignored
1081 and, if so, the ignore rule (file and line number) that matched it.
1081 and, if so, the ignore rule (file and line number) that matched it.
1082 """
1082 """
1083 ignore = repo.dirstate._ignore
1083 ignore = repo.dirstate._ignore
1084 if not files:
1084 if not files:
1085 # Show all the patterns
1085 # Show all the patterns
1086 ui.write("%s\n" % pycompat.byterepr(ignore))
1086 ui.write("%s\n" % pycompat.byterepr(ignore))
1087 else:
1087 else:
1088 m = scmutil.match(repo[None], pats=files)
1088 m = scmutil.match(repo[None], pats=files)
1089 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1089 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1090 for f in m.files():
1090 for f in m.files():
1091 nf = util.normpath(f)
1091 nf = util.normpath(f)
1092 ignored = None
1092 ignored = None
1093 ignoredata = None
1093 ignoredata = None
1094 if nf != '.':
1094 if nf != '.':
1095 if ignore(nf):
1095 if ignore(nf):
1096 ignored = nf
1096 ignored = nf
1097 ignoredata = repo.dirstate._ignorefileandline(nf)
1097 ignoredata = repo.dirstate._ignorefileandline(nf)
1098 else:
1098 else:
1099 for p in util.finddirs(nf):
1099 for p in util.finddirs(nf):
1100 if ignore(p):
1100 if ignore(p):
1101 ignored = p
1101 ignored = p
1102 ignoredata = repo.dirstate._ignorefileandline(p)
1102 ignoredata = repo.dirstate._ignorefileandline(p)
1103 break
1103 break
1104 if ignored:
1104 if ignored:
1105 if ignored == nf:
1105 if ignored == nf:
1106 ui.write(_("%s is ignored\n") % uipathfn(f))
1106 ui.write(_("%s is ignored\n") % uipathfn(f))
1107 else:
1107 else:
1108 ui.write(_("%s is ignored because of "
1108 ui.write(_("%s is ignored because of "
1109 "containing folder %s\n")
1109 "containing folder %s\n")
1110 % (uipathfn(f), ignored))
1110 % (uipathfn(f), ignored))
1111 ignorefile, lineno, line = ignoredata
1111 ignorefile, lineno, line = ignoredata
1112 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1112 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1113 % (ignorefile, lineno, line))
1113 % (ignorefile, lineno, line))
1114 else:
1114 else:
1115 ui.write(_("%s is not ignored\n") % uipathfn(f))
1115 ui.write(_("%s is not ignored\n") % uipathfn(f))
1116
1116
1117 @command('debugindex', cmdutil.debugrevlogopts + cmdutil.formatteropts,
1117 @command('debugindex', cmdutil.debugrevlogopts + cmdutil.formatteropts,
1118 _('-c|-m|FILE'))
1118 _('-c|-m|FILE'))
1119 def debugindex(ui, repo, file_=None, **opts):
1119 def debugindex(ui, repo, file_=None, **opts):
1120 """dump index data for a storage primitive"""
1120 """dump index data for a storage primitive"""
1121 opts = pycompat.byteskwargs(opts)
1121 opts = pycompat.byteskwargs(opts)
1122 store = cmdutil.openstorage(repo, 'debugindex', file_, opts)
1122 store = cmdutil.openstorage(repo, 'debugindex', file_, opts)
1123
1123
1124 if ui.debugflag:
1124 if ui.debugflag:
1125 shortfn = hex
1125 shortfn = hex
1126 else:
1126 else:
1127 shortfn = short
1127 shortfn = short
1128
1128
1129 idlen = 12
1129 idlen = 12
1130 for i in store:
1130 for i in store:
1131 idlen = len(shortfn(store.node(i)))
1131 idlen = len(shortfn(store.node(i)))
1132 break
1132 break
1133
1133
1134 fm = ui.formatter('debugindex', opts)
1134 fm = ui.formatter('debugindex', opts)
1135 fm.plain(b' rev linkrev %s %s p2\n' % (
1135 fm.plain(b' rev linkrev %s %s p2\n' % (
1136 b'nodeid'.ljust(idlen),
1136 b'nodeid'.ljust(idlen),
1137 b'p1'.ljust(idlen)))
1137 b'p1'.ljust(idlen)))
1138
1138
1139 for rev in store:
1139 for rev in store:
1140 node = store.node(rev)
1140 node = store.node(rev)
1141 parents = store.parents(node)
1141 parents = store.parents(node)
1142
1142
1143 fm.startitem()
1143 fm.startitem()
1144 fm.write(b'rev', b'%6d ', rev)
1144 fm.write(b'rev', b'%6d ', rev)
1145 fm.write(b'linkrev', '%7d ', store.linkrev(rev))
1145 fm.write(b'linkrev', '%7d ', store.linkrev(rev))
1146 fm.write(b'node', '%s ', shortfn(node))
1146 fm.write(b'node', '%s ', shortfn(node))
1147 fm.write(b'p1', '%s ', shortfn(parents[0]))
1147 fm.write(b'p1', '%s ', shortfn(parents[0]))
1148 fm.write(b'p2', '%s', shortfn(parents[1]))
1148 fm.write(b'p2', '%s', shortfn(parents[1]))
1149 fm.plain(b'\n')
1149 fm.plain(b'\n')
1150
1150
1151 fm.end()
1151 fm.end()
1152
1152
1153 @command('debugindexdot', cmdutil.debugrevlogopts,
1153 @command('debugindexdot', cmdutil.debugrevlogopts,
1154 _('-c|-m|FILE'), optionalrepo=True)
1154 _('-c|-m|FILE'), optionalrepo=True)
1155 def debugindexdot(ui, repo, file_=None, **opts):
1155 def debugindexdot(ui, repo, file_=None, **opts):
1156 """dump an index DAG as a graphviz dot file"""
1156 """dump an index DAG as a graphviz dot file"""
1157 opts = pycompat.byteskwargs(opts)
1157 opts = pycompat.byteskwargs(opts)
1158 r = cmdutil.openstorage(repo, 'debugindexdot', file_, opts)
1158 r = cmdutil.openstorage(repo, 'debugindexdot', file_, opts)
1159 ui.write(("digraph G {\n"))
1159 ui.write(("digraph G {\n"))
1160 for i in r:
1160 for i in r:
1161 node = r.node(i)
1161 node = r.node(i)
1162 pp = r.parents(node)
1162 pp = r.parents(node)
1163 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1163 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1164 if pp[1] != nullid:
1164 if pp[1] != nullid:
1165 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1165 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1166 ui.write("}\n")
1166 ui.write("}\n")
1167
1167
1168 @command('debugindexstats', [])
1168 @command('debugindexstats', [])
1169 def debugindexstats(ui, repo):
1169 def debugindexstats(ui, repo):
1170 """show stats related to the changelog index"""
1170 """show stats related to the changelog index"""
1171 repo.changelog.shortest(nullid, 1)
1171 repo.changelog.shortest(nullid, 1)
1172 index = repo.changelog.index
1172 index = repo.changelog.index
1173 if not util.safehasattr(index, 'stats'):
1173 if not util.safehasattr(index, 'stats'):
1174 raise error.Abort(_('debugindexstats only works with native code'))
1174 raise error.Abort(_('debugindexstats only works with native code'))
1175 for k, v in sorted(index.stats().items()):
1175 for k, v in sorted(index.stats().items()):
1176 ui.write('%s: %d\n' % (k, v))
1176 ui.write('%s: %d\n' % (k, v))
1177
1177
1178 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1178 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1179 def debuginstall(ui, **opts):
1179 def debuginstall(ui, **opts):
1180 '''test Mercurial installation
1180 '''test Mercurial installation
1181
1181
1182 Returns 0 on success.
1182 Returns 0 on success.
1183 '''
1183 '''
1184 opts = pycompat.byteskwargs(opts)
1184 opts = pycompat.byteskwargs(opts)
1185
1185
1186 problems = 0
1186 problems = 0
1187
1187
1188 fm = ui.formatter('debuginstall', opts)
1188 fm = ui.formatter('debuginstall', opts)
1189 fm.startitem()
1189 fm.startitem()
1190
1190
1191 # encoding
1191 # encoding
1192 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1192 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1193 err = None
1193 err = None
1194 try:
1194 try:
1195 codecs.lookup(pycompat.sysstr(encoding.encoding))
1195 codecs.lookup(pycompat.sysstr(encoding.encoding))
1196 except LookupError as inst:
1196 except LookupError as inst:
1197 err = stringutil.forcebytestr(inst)
1197 err = stringutil.forcebytestr(inst)
1198 problems += 1
1198 problems += 1
1199 fm.condwrite(err, 'encodingerror', _(" %s\n"
1199 fm.condwrite(err, 'encodingerror', _(" %s\n"
1200 " (check that your locale is properly set)\n"), err)
1200 " (check that your locale is properly set)\n"), err)
1201
1201
1202 # Python
1202 # Python
1203 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1203 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1204 pycompat.sysexecutable)
1204 pycompat.sysexecutable)
1205 fm.write('pythonver', _("checking Python version (%s)\n"),
1205 fm.write('pythonver', _("checking Python version (%s)\n"),
1206 ("%d.%d.%d" % sys.version_info[:3]))
1206 ("%d.%d.%d" % sys.version_info[:3]))
1207 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1207 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1208 os.path.dirname(pycompat.fsencode(os.__file__)))
1208 os.path.dirname(pycompat.fsencode(os.__file__)))
1209
1209
1210 security = set(sslutil.supportedprotocols)
1210 security = set(sslutil.supportedprotocols)
1211 if sslutil.hassni:
1211 if sslutil.hassni:
1212 security.add('sni')
1212 security.add('sni')
1213
1213
1214 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1214 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1215 fm.formatlist(sorted(security), name='protocol',
1215 fm.formatlist(sorted(security), name='protocol',
1216 fmt='%s', sep=','))
1216 fmt='%s', sep=','))
1217
1217
1218 # These are warnings, not errors. So don't increment problem count. This
1218 # These are warnings, not errors. So don't increment problem count. This
1219 # may change in the future.
1219 # may change in the future.
1220 if 'tls1.2' not in security:
1220 if 'tls1.2' not in security:
1221 fm.plain(_(' TLS 1.2 not supported by Python install; '
1221 fm.plain(_(' TLS 1.2 not supported by Python install; '
1222 'network connections lack modern security\n'))
1222 'network connections lack modern security\n'))
1223 if 'sni' not in security:
1223 if 'sni' not in security:
1224 fm.plain(_(' SNI not supported by Python install; may have '
1224 fm.plain(_(' SNI not supported by Python install; may have '
1225 'connectivity issues with some servers\n'))
1225 'connectivity issues with some servers\n'))
1226
1226
1227 # TODO print CA cert info
1227 # TODO print CA cert info
1228
1228
1229 # hg version
1229 # hg version
1230 hgver = util.version()
1230 hgver = util.version()
1231 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1231 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1232 hgver.split('+')[0])
1232 hgver.split('+')[0])
1233 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1233 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1234 '+'.join(hgver.split('+')[1:]))
1234 '+'.join(hgver.split('+')[1:]))
1235
1235
1236 # compiled modules
1236 # compiled modules
1237 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1237 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1238 policy.policy)
1238 policy.policy)
1239 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1239 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1240 os.path.dirname(pycompat.fsencode(__file__)))
1240 os.path.dirname(pycompat.fsencode(__file__)))
1241
1241
1242 if policy.policy in ('c', 'allow'):
1242 if policy.policy in ('c', 'allow'):
1243 err = None
1243 err = None
1244 try:
1244 try:
1245 from .cext import (
1245 from .cext import (
1246 base85,
1246 base85,
1247 bdiff,
1247 bdiff,
1248 mpatch,
1248 mpatch,
1249 osutil,
1249 osutil,
1250 )
1250 )
1251 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1251 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1252 except Exception as inst:
1252 except Exception as inst:
1253 err = stringutil.forcebytestr(inst)
1253 err = stringutil.forcebytestr(inst)
1254 problems += 1
1254 problems += 1
1255 fm.condwrite(err, 'extensionserror', " %s\n", err)
1255 fm.condwrite(err, 'extensionserror', " %s\n", err)
1256
1256
1257 compengines = util.compengines._engines.values()
1257 compengines = util.compengines._engines.values()
1258 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1258 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1259 fm.formatlist(sorted(e.name() for e in compengines),
1259 fm.formatlist(sorted(e.name() for e in compengines),
1260 name='compengine', fmt='%s', sep=', '))
1260 name='compengine', fmt='%s', sep=', '))
1261 fm.write('compenginesavail', _('checking available compression engines '
1261 fm.write('compenginesavail', _('checking available compression engines '
1262 '(%s)\n'),
1262 '(%s)\n'),
1263 fm.formatlist(sorted(e.name() for e in compengines
1263 fm.formatlist(sorted(e.name() for e in compengines
1264 if e.available()),
1264 if e.available()),
1265 name='compengine', fmt='%s', sep=', '))
1265 name='compengine', fmt='%s', sep=', '))
1266 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1266 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1267 fm.write('compenginesserver', _('checking available compression engines '
1267 fm.write('compenginesserver', _('checking available compression engines '
1268 'for wire protocol (%s)\n'),
1268 'for wire protocol (%s)\n'),
1269 fm.formatlist([e.name() for e in wirecompengines
1269 fm.formatlist([e.name() for e in wirecompengines
1270 if e.wireprotosupport()],
1270 if e.wireprotosupport()],
1271 name='compengine', fmt='%s', sep=', '))
1271 name='compengine', fmt='%s', sep=', '))
1272 re2 = 'missing'
1272 re2 = 'missing'
1273 if util._re2:
1273 if util._re2:
1274 re2 = 'available'
1274 re2 = 'available'
1275 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1275 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1276 fm.data(re2=bool(util._re2))
1276 fm.data(re2=bool(util._re2))
1277
1277
1278 # templates
1278 # templates
1279 p = templater.templatepaths()
1279 p = templater.templatepaths()
1280 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1280 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1281 fm.condwrite(not p, '', _(" no template directories found\n"))
1281 fm.condwrite(not p, '', _(" no template directories found\n"))
1282 if p:
1282 if p:
1283 m = templater.templatepath("map-cmdline.default")
1283 m = templater.templatepath("map-cmdline.default")
1284 if m:
1284 if m:
1285 # template found, check if it is working
1285 # template found, check if it is working
1286 err = None
1286 err = None
1287 try:
1287 try:
1288 templater.templater.frommapfile(m)
1288 templater.templater.frommapfile(m)
1289 except Exception as inst:
1289 except Exception as inst:
1290 err = stringutil.forcebytestr(inst)
1290 err = stringutil.forcebytestr(inst)
1291 p = None
1291 p = None
1292 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1292 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1293 else:
1293 else:
1294 p = None
1294 p = None
1295 fm.condwrite(p, 'defaulttemplate',
1295 fm.condwrite(p, 'defaulttemplate',
1296 _("checking default template (%s)\n"), m)
1296 _("checking default template (%s)\n"), m)
1297 fm.condwrite(not m, 'defaulttemplatenotfound',
1297 fm.condwrite(not m, 'defaulttemplatenotfound',
1298 _(" template '%s' not found\n"), "default")
1298 _(" template '%s' not found\n"), "default")
1299 if not p:
1299 if not p:
1300 problems += 1
1300 problems += 1
1301 fm.condwrite(not p, '',
1301 fm.condwrite(not p, '',
1302 _(" (templates seem to have been installed incorrectly)\n"))
1302 _(" (templates seem to have been installed incorrectly)\n"))
1303
1303
1304 # editor
1304 # editor
1305 editor = ui.geteditor()
1305 editor = ui.geteditor()
1306 editor = util.expandpath(editor)
1306 editor = util.expandpath(editor)
1307 editorbin = procutil.shellsplit(editor)[0]
1307 editorbin = procutil.shellsplit(editor)[0]
1308 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1308 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1309 cmdpath = procutil.findexe(editorbin)
1309 cmdpath = procutil.findexe(editorbin)
1310 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1310 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1311 _(" No commit editor set and can't find %s in PATH\n"
1311 _(" No commit editor set and can't find %s in PATH\n"
1312 " (specify a commit editor in your configuration"
1312 " (specify a commit editor in your configuration"
1313 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1313 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1314 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1314 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1315 _(" Can't find editor '%s' in PATH\n"
1315 _(" Can't find editor '%s' in PATH\n"
1316 " (specify a commit editor in your configuration"
1316 " (specify a commit editor in your configuration"
1317 " file)\n"), not cmdpath and editorbin)
1317 " file)\n"), not cmdpath and editorbin)
1318 if not cmdpath and editor != 'vi':
1318 if not cmdpath and editor != 'vi':
1319 problems += 1
1319 problems += 1
1320
1320
1321 # check username
1321 # check username
1322 username = None
1322 username = None
1323 err = None
1323 err = None
1324 try:
1324 try:
1325 username = ui.username()
1325 username = ui.username()
1326 except error.Abort as e:
1326 except error.Abort as e:
1327 err = stringutil.forcebytestr(e)
1327 err = stringutil.forcebytestr(e)
1328 problems += 1
1328 problems += 1
1329
1329
1330 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1330 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1331 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1331 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1332 " (specify a username in your configuration file)\n"), err)
1332 " (specify a username in your configuration file)\n"), err)
1333
1333
1334 fm.condwrite(not problems, '',
1334 fm.condwrite(not problems, '',
1335 _("no problems detected\n"))
1335 _("no problems detected\n"))
1336 if not problems:
1336 if not problems:
1337 fm.data(problems=problems)
1337 fm.data(problems=problems)
1338 fm.condwrite(problems, 'problems',
1338 fm.condwrite(problems, 'problems',
1339 _("%d problems detected,"
1339 _("%d problems detected,"
1340 " please check your install!\n"), problems)
1340 " please check your install!\n"), problems)
1341 fm.end()
1341 fm.end()
1342
1342
1343 return problems
1343 return problems
1344
1344
1345 @command('debugknown', [], _('REPO ID...'), norepo=True)
1345 @command('debugknown', [], _('REPO ID...'), norepo=True)
1346 def debugknown(ui, repopath, *ids, **opts):
1346 def debugknown(ui, repopath, *ids, **opts):
1347 """test whether node ids are known to a repo
1347 """test whether node ids are known to a repo
1348
1348
1349 Every ID must be a full-length hex node id string. Returns a list of 0s
1349 Every ID must be a full-length hex node id string. Returns a list of 0s
1350 and 1s indicating unknown/known.
1350 and 1s indicating unknown/known.
1351 """
1351 """
1352 opts = pycompat.byteskwargs(opts)
1352 opts = pycompat.byteskwargs(opts)
1353 repo = hg.peer(ui, opts, repopath)
1353 repo = hg.peer(ui, opts, repopath)
1354 if not repo.capable('known'):
1354 if not repo.capable('known'):
1355 raise error.Abort("known() not supported by target repository")
1355 raise error.Abort("known() not supported by target repository")
1356 flags = repo.known([bin(s) for s in ids])
1356 flags = repo.known([bin(s) for s in ids])
1357 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1357 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1358
1358
1359 @command('debuglabelcomplete', [], _('LABEL...'))
1359 @command('debuglabelcomplete', [], _('LABEL...'))
1360 def debuglabelcomplete(ui, repo, *args):
1360 def debuglabelcomplete(ui, repo, *args):
1361 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1361 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1362 debugnamecomplete(ui, repo, *args)
1362 debugnamecomplete(ui, repo, *args)
1363
1363
1364 @command('debuglocks',
1364 @command('debuglocks',
1365 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1365 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1366 ('W', 'force-wlock', None,
1366 ('W', 'force-wlock', None,
1367 _('free the working state lock (DANGEROUS)')),
1367 _('free the working state lock (DANGEROUS)')),
1368 ('s', 'set-lock', None, _('set the store lock until stopped')),
1368 ('s', 'set-lock', None, _('set the store lock until stopped')),
1369 ('S', 'set-wlock', None,
1369 ('S', 'set-wlock', None,
1370 _('set the working state lock until stopped'))],
1370 _('set the working state lock until stopped'))],
1371 _('[OPTION]...'))
1371 _('[OPTION]...'))
1372 def debuglocks(ui, repo, **opts):
1372 def debuglocks(ui, repo, **opts):
1373 """show or modify state of locks
1373 """show or modify state of locks
1374
1374
1375 By default, this command will show which locks are held. This
1375 By default, this command will show which locks are held. This
1376 includes the user and process holding the lock, the amount of time
1376 includes the user and process holding the lock, the amount of time
1377 the lock has been held, and the machine name where the process is
1377 the lock has been held, and the machine name where the process is
1378 running if it's not local.
1378 running if it's not local.
1379
1379
1380 Locks protect the integrity of Mercurial's data, so they should be
1380 Locks protect the integrity of Mercurial's data, so they should be
1381 treated with care. System crashes or other interruptions may cause
1381 treated with care. System crashes or other interruptions may cause
1382 locks to not be properly released, though Mercurial will usually
1382 locks to not be properly released, though Mercurial will usually
1383 detect and remove such stale locks automatically.
1383 detect and remove such stale locks automatically.
1384
1384
1385 However, detecting stale locks may not always be possible (for
1385 However, detecting stale locks may not always be possible (for
1386 instance, on a shared filesystem). Removing locks may also be
1386 instance, on a shared filesystem). Removing locks may also be
1387 blocked by filesystem permissions.
1387 blocked by filesystem permissions.
1388
1388
1389 Setting a lock will prevent other commands from changing the data.
1389 Setting a lock will prevent other commands from changing the data.
1390 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1390 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1391 The set locks are removed when the command exits.
1391 The set locks are removed when the command exits.
1392
1392
1393 Returns 0 if no locks are held.
1393 Returns 0 if no locks are held.
1394
1394
1395 """
1395 """
1396
1396
1397 if opts.get(r'force_lock'):
1397 if opts.get(r'force_lock'):
1398 repo.svfs.unlink('lock')
1398 repo.svfs.unlink('lock')
1399 if opts.get(r'force_wlock'):
1399 if opts.get(r'force_wlock'):
1400 repo.vfs.unlink('wlock')
1400 repo.vfs.unlink('wlock')
1401 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1401 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1402 return 0
1402 return 0
1403
1403
1404 locks = []
1404 locks = []
1405 try:
1405 try:
1406 if opts.get(r'set_wlock'):
1406 if opts.get(r'set_wlock'):
1407 try:
1407 try:
1408 locks.append(repo.wlock(False))
1408 locks.append(repo.wlock(False))
1409 except error.LockHeld:
1409 except error.LockHeld:
1410 raise error.Abort(_('wlock is already held'))
1410 raise error.Abort(_('wlock is already held'))
1411 if opts.get(r'set_lock'):
1411 if opts.get(r'set_lock'):
1412 try:
1412 try:
1413 locks.append(repo.lock(False))
1413 locks.append(repo.lock(False))
1414 except error.LockHeld:
1414 except error.LockHeld:
1415 raise error.Abort(_('lock is already held'))
1415 raise error.Abort(_('lock is already held'))
1416 if len(locks):
1416 if len(locks):
1417 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1417 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1418 return 0
1418 return 0
1419 finally:
1419 finally:
1420 release(*locks)
1420 release(*locks)
1421
1421
1422 now = time.time()
1422 now = time.time()
1423 held = 0
1423 held = 0
1424
1424
1425 def report(vfs, name, method):
1425 def report(vfs, name, method):
1426 # this causes stale locks to get reaped for more accurate reporting
1426 # this causes stale locks to get reaped for more accurate reporting
1427 try:
1427 try:
1428 l = method(False)
1428 l = method(False)
1429 except error.LockHeld:
1429 except error.LockHeld:
1430 l = None
1430 l = None
1431
1431
1432 if l:
1432 if l:
1433 l.release()
1433 l.release()
1434 else:
1434 else:
1435 try:
1435 try:
1436 st = vfs.lstat(name)
1436 st = vfs.lstat(name)
1437 age = now - st[stat.ST_MTIME]
1437 age = now - st[stat.ST_MTIME]
1438 user = util.username(st.st_uid)
1438 user = util.username(st.st_uid)
1439 locker = vfs.readlock(name)
1439 locker = vfs.readlock(name)
1440 if ":" in locker:
1440 if ":" in locker:
1441 host, pid = locker.split(':')
1441 host, pid = locker.split(':')
1442 if host == socket.gethostname():
1442 if host == socket.gethostname():
1443 locker = 'user %s, process %s' % (user or b'None', pid)
1443 locker = 'user %s, process %s' % (user or b'None', pid)
1444 else:
1444 else:
1445 locker = ('user %s, process %s, host %s'
1445 locker = ('user %s, process %s, host %s'
1446 % (user or b'None', pid, host))
1446 % (user or b'None', pid, host))
1447 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1447 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1448 return 1
1448 return 1
1449 except OSError as e:
1449 except OSError as e:
1450 if e.errno != errno.ENOENT:
1450 if e.errno != errno.ENOENT:
1451 raise
1451 raise
1452
1452
1453 ui.write(("%-6s free\n") % (name + ":"))
1453 ui.write(("%-6s free\n") % (name + ":"))
1454 return 0
1454 return 0
1455
1455
1456 held += report(repo.svfs, "lock", repo.lock)
1456 held += report(repo.svfs, "lock", repo.lock)
1457 held += report(repo.vfs, "wlock", repo.wlock)
1457 held += report(repo.vfs, "wlock", repo.wlock)
1458
1458
1459 return held
1459 return held
1460
1460
1461 @command('debugmanifestfulltextcache', [
1461 @command('debugmanifestfulltextcache', [
1462 ('', 'clear', False, _('clear the cache')),
1462 ('', 'clear', False, _('clear the cache')),
1463 ('a', 'add', '', _('add the given manifest node to the cache'),
1463 ('a', 'add', '', _('add the given manifest node to the cache'),
1464 _('NODE'))
1464 _('NODE'))
1465 ], '')
1465 ], '')
1466 def debugmanifestfulltextcache(ui, repo, add=None, **opts):
1466 def debugmanifestfulltextcache(ui, repo, add=None, **opts):
1467 """show, clear or amend the contents of the manifest fulltext cache"""
1467 """show, clear or amend the contents of the manifest fulltext cache"""
1468
1468
1469 def getcache():
1469 def getcache():
1470 r = repo.manifestlog.getstorage(b'')
1470 r = repo.manifestlog.getstorage(b'')
1471 try:
1471 try:
1472 return r._fulltextcache
1472 return r._fulltextcache
1473 except AttributeError:
1473 except AttributeError:
1474 msg = _("Current revlog implementation doesn't appear to have a "
1474 msg = _("Current revlog implementation doesn't appear to have a "
1475 "manifest fulltext cache\n")
1475 "manifest fulltext cache\n")
1476 raise error.Abort(msg)
1476 raise error.Abort(msg)
1477
1477
1478 if opts.get(r'clear'):
1478 if opts.get(r'clear'):
1479 with repo.lock():
1479 with repo.lock():
1480 cache = getcache()
1480 cache = getcache()
1481 cache.clear()
1481 cache.clear()
1482
1482
1483 if add:
1483 if add:
1484 with repo.lock():
1484 with repo.lock():
1485 try:
1485 try:
1486 m = repo.manifestlog
1486 m = repo.manifestlog
1487 manifest = m[m.getstorage(b'').lookup(add)]
1487 manifest = m[m.getstorage(b'').lookup(add)]
1488 except error.LookupError as e:
1488 except error.LookupError as e:
1489 raise error.Abort(e, hint="Check your manifest node id")
1489 raise error.Abort(e, hint="Check your manifest node id")
1490 manifest.read() # stores revision in the cache too
1490 manifest.read() # stores revision in the cache too
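# with --add, return here instead of falling through to the cache listing below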
1491 return
1491
1492
1492 cache = getcache()
1493 cache = getcache()
1493 if not len(cache):
1494 if not len(cache):
1494 ui.write(_('cache empty\n'))
1495 ui.write(_('cache empty\n'))
1495 else:
1496 else:
1496 ui.write(
1497 ui.write(
1497 _('cache contains %d manifest entries, in order of most to '
1498 _('cache contains %d manifest entries, in order of most to '
1498 'least recent:\n') % (len(cache),))
1499 'least recent:\n') % (len(cache),))
1499 totalsize = 0
1500 totalsize = 0
1500 for nodeid in cache:
1501 for nodeid in cache:
1501 # Use cache.get to not update the LRU order
1502 # Use cache.get to not update the LRU order
1502 data = cache.get(nodeid)
1503 data = cache.get(nodeid)
1503 size = len(data)
1504 size = len(data)
1504 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1505 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1505 ui.write(_('id: %s, size %s\n') % (
1506 ui.write(_('id: %s, size %s\n') % (
1506 hex(nodeid), util.bytecount(size)))
1507 hex(nodeid), util.bytecount(size)))
1507 ondisk = cache._opener.stat('manifestfulltextcache').st_size
1508 ondisk = cache._opener.stat('manifestfulltextcache').st_size
1508 ui.write(
1509 ui.write(
1509 _('total cache data size %s, on-disk %s\n') % (
1510 _('total cache data size %s, on-disk %s\n') % (
1510 util.bytecount(totalsize), util.bytecount(ondisk))
1511 util.bytecount(totalsize), util.bytecount(ondisk))
1511 )
1512 )
1512
1513
1513 @command('debugmergestate', [], '')
1514 @command('debugmergestate', [], '')
1514 def debugmergestate(ui, repo, *args):
1515 def debugmergestate(ui, repo, *args):
1515 """print merge state
1516 """print merge state
1516
1517
1517 Use --verbose to print out information about whether v1 or v2 merge state
1518 Use --verbose to print out information about whether v1 or v2 merge state
1518 was chosen."""
1519 was chosen."""
1519 def _hashornull(h):
1520 def _hashornull(h):
1520 if h == nullhex:
1521 if h == nullhex:
1521 return 'null'
1522 return 'null'
1522 else:
1523 else:
1523 return h
1524 return h
1524
1525
1525 def printrecords(version):
1526 def printrecords(version):
1526 ui.write(('* version %d records\n') % version)
1527 ui.write(('* version %d records\n') % version)
1527 if version == 1:
1528 if version == 1:
1528 records = v1records
1529 records = v1records
1529 else:
1530 else:
1530 records = v2records
1531 records = v2records
1531
1532
1532 for rtype, record in records:
1533 for rtype, record in records:
1533 # pretty print some record types
1534 # pretty print some record types
1534 if rtype == 'L':
1535 if rtype == 'L':
1535 ui.write(('local: %s\n') % record)
1536 ui.write(('local: %s\n') % record)
1536 elif rtype == 'O':
1537 elif rtype == 'O':
1537 ui.write(('other: %s\n') % record)
1538 ui.write(('other: %s\n') % record)
1538 elif rtype == 'm':
1539 elif rtype == 'm':
1539 driver, mdstate = record.split('\0', 1)
1540 driver, mdstate = record.split('\0', 1)
1540 ui.write(('merge driver: %s (state "%s")\n')
1541 ui.write(('merge driver: %s (state "%s")\n')
1541 % (driver, mdstate))
1542 % (driver, mdstate))
1542 elif rtype in 'FDC':
1543 elif rtype in 'FDC':
1543 r = record.split('\0')
1544 r = record.split('\0')
1544 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1545 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1545 if version == 1:
1546 if version == 1:
1546 onode = 'not stored in v1 format'
1547 onode = 'not stored in v1 format'
1547 flags = r[7]
1548 flags = r[7]
1548 else:
1549 else:
1549 onode, flags = r[7:9]
1550 onode, flags = r[7:9]
1550 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1551 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1551 % (f, rtype, state, _hashornull(hash)))
1552 % (f, rtype, state, _hashornull(hash)))
1552 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1553 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1553 ui.write((' ancestor path: %s (node %s)\n')
1554 ui.write((' ancestor path: %s (node %s)\n')
1554 % (afile, _hashornull(anode)))
1555 % (afile, _hashornull(anode)))
1555 ui.write((' other path: %s (node %s)\n')
1556 ui.write((' other path: %s (node %s)\n')
1556 % (ofile, _hashornull(onode)))
1557 % (ofile, _hashornull(onode)))
1557 elif rtype == 'f':
1558 elif rtype == 'f':
1558 filename, rawextras = record.split('\0', 1)
1559 filename, rawextras = record.split('\0', 1)
1559 extras = rawextras.split('\0')
1560 extras = rawextras.split('\0')
1560 i = 0
1561 i = 0
1561 extrastrings = []
1562 extrastrings = []
1562 while i < len(extras):
1563 while i < len(extras):
1563 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1564 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1564 i += 2
1565 i += 2
1565
1566
1566 ui.write(('file extras: %s (%s)\n')
1567 ui.write(('file extras: %s (%s)\n')
1567 % (filename, ', '.join(extrastrings)))
1568 % (filename, ', '.join(extrastrings)))
1568 elif rtype == 'l':
1569 elif rtype == 'l':
1569 labels = record.split('\0', 2)
1570 labels = record.split('\0', 2)
1570 labels = [l for l in labels if len(l) > 0]
1571 labels = [l for l in labels if len(l) > 0]
1571 ui.write(('labels:\n'))
1572 ui.write(('labels:\n'))
1572 ui.write((' local: %s\n' % labels[0]))
1573 ui.write((' local: %s\n' % labels[0]))
1573 ui.write((' other: %s\n' % labels[1]))
1574 ui.write((' other: %s\n' % labels[1]))
1574 if len(labels) > 2:
1575 if len(labels) > 2:
1575 ui.write((' base: %s\n' % labels[2]))
1576 ui.write((' base: %s\n' % labels[2]))
1576 else:
1577 else:
1577 ui.write(('unrecognized entry: %s\t%s\n')
1578 ui.write(('unrecognized entry: %s\t%s\n')
1578 % (rtype, record.replace('\0', '\t')))
1579 % (rtype, record.replace('\0', '\t')))
1579
1580
1580 # Avoid mergestate.read() since it may raise an exception for unsupported
1581 # Avoid mergestate.read() since it may raise an exception for unsupported
1581 # merge state records. We shouldn't be doing this, but this is OK since this
1582 # merge state records. We shouldn't be doing this, but this is OK since this
1582 # command is pretty low-level.
1583 # command is pretty low-level.
1583 ms = mergemod.mergestate(repo)
1584 ms = mergemod.mergestate(repo)
1584
1585
1585 # sort so that reasonable information is on top
1586 # sort so that reasonable information is on top
1586 v1records = ms._readrecordsv1()
1587 v1records = ms._readrecordsv1()
1587 v2records = ms._readrecordsv2()
1588 v2records = ms._readrecordsv2()
1588 order = 'LOml'
1589 order = 'LOml'
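# records whose type appears in 'order' ('L'ocal, 'O'ther, 'm'erge driver, 'l'abels)
# sort first, in that order; all other records sort after them by record payload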
1589 def key(r):
1590 def key(r):
1590 idx = order.find(r[0])
1591 idx = order.find(r[0])
1591 if idx == -1:
1592 if idx == -1:
1592 return (1, r[1])
1593 return (1, r[1])
1593 else:
1594 else:
1594 return (0, idx)
1595 return (0, idx)
1595 v1records.sort(key=key)
1596 v1records.sort(key=key)
1596 v2records.sort(key=key)
1597 v2records.sort(key=key)
1597
1598
1598 if not v1records and not v2records:
1599 if not v1records and not v2records:
1599 ui.write(('no merge state found\n'))
1600 ui.write(('no merge state found\n'))
1600 elif not v2records:
1601 elif not v2records:
1601 ui.note(('no version 2 merge state\n'))
1602 ui.note(('no version 2 merge state\n'))
1602 printrecords(1)
1603 printrecords(1)
1603 elif ms._v1v2match(v1records, v2records):
1604 elif ms._v1v2match(v1records, v2records):
1604 ui.note(('v1 and v2 states match: using v2\n'))
1605 ui.note(('v1 and v2 states match: using v2\n'))
1605 printrecords(2)
1606 printrecords(2)
1606 else:
1607 else:
1607 ui.note(('v1 and v2 states mismatch: using v1\n'))
1608 ui.note(('v1 and v2 states mismatch: using v1\n'))
1608 printrecords(1)
1609 printrecords(1)
1609 if ui.verbose:
1610 if ui.verbose:
1610 printrecords(2)
1611 printrecords(2)
1611
1612
1612 @command('debugnamecomplete', [], _('NAME...'))
1613 @command('debugnamecomplete', [], _('NAME...'))
1613 def debugnamecomplete(ui, repo, *args):
1614 def debugnamecomplete(ui, repo, *args):
1614 '''complete "names" - tags, open branch names, bookmark names'''
1615 '''complete "names" - tags, open branch names, bookmark names'''
1615
1616
1616 names = set()
1617 names = set()
1617 # since we previously only listed open branches, we will handle that
1618 # since we previously only listed open branches, we will handle that
1618 # specially (after this for loop)
1619 # specially (after this for loop)
1619 for name, ns in repo.names.iteritems():
1620 for name, ns in repo.names.iteritems():
1620 if name != 'branches':
1621 if name != 'branches':
1621 names.update(ns.listnames(repo))
1622 names.update(ns.listnames(repo))
1622 names.update(tag for (tag, heads, tip, closed)
1623 names.update(tag for (tag, heads, tip, closed)
1623 in repo.branchmap().iterbranches() if not closed)
1624 in repo.branchmap().iterbranches() if not closed)
1624 completions = set()
1625 completions = set()
1625 if not args:
1626 if not args:
1626 args = ['']
1627 args = ['']
1627 for a in args:
1628 for a in args:
1628 completions.update(n for n in names if n.startswith(a))
1629 completions.update(n for n in names if n.startswith(a))
1629 ui.write('\n'.join(sorted(completions)))
1630 ui.write('\n'.join(sorted(completions)))
1630 ui.write('\n')
1631 ui.write('\n')
1631
1632
1632 @command('debugobsolete',
1633 @command('debugobsolete',
1633 [('', 'flags', 0, _('markers flag')),
1634 [('', 'flags', 0, _('markers flag')),
1634 ('', 'record-parents', False,
1635 ('', 'record-parents', False,
1635 _('record parent information for the precursor')),
1636 _('record parent information for the precursor')),
1636 ('r', 'rev', [], _('display markers relevant to REV')),
1637 ('r', 'rev', [], _('display markers relevant to REV')),
1637 ('', 'exclusive', False, _('restrict display to markers only '
1638 ('', 'exclusive', False, _('restrict display to markers only '
1638 'relevant to REV')),
1639 'relevant to REV')),
1639 ('', 'index', False, _('display index of the marker')),
1640 ('', 'index', False, _('display index of the marker')),
1640 ('', 'delete', [], _('delete markers specified by indices')),
1641 ('', 'delete', [], _('delete markers specified by indices')),
1641 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1642 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1642 _('[OBSOLETED [REPLACEMENT ...]]'))
1643 _('[OBSOLETED [REPLACEMENT ...]]'))
1643 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1644 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1644 """create arbitrary obsolete marker
1645 """create arbitrary obsolete marker
1645
1646
1646 With no arguments, displays the list of obsolescence markers."""
1647 With no arguments, displays the list of obsolescence markers."""
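# For example (illustrative invocations; node ids are placeholders):
#   hg debugobsolete --index                              # list markers with their indices
#   hg debugobsolete <obsoleted-node> <replacement-node>  # record one marker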
1647
1648
1648 opts = pycompat.byteskwargs(opts)
1649 opts = pycompat.byteskwargs(opts)
1649
1650
1650 def parsenodeid(s):
1651 def parsenodeid(s):
1651 try:
1652 try:
1652 # We do not use revsingle/revrange functions here to accept
1653 # We do not use revsingle/revrange functions here to accept
1653 # arbitrary node identifiers, possibly not present in the
1654 # arbitrary node identifiers, possibly not present in the
1654 # local repository.
1655 # local repository.
1655 n = bin(s)
1656 n = bin(s)
1656 if len(n) != len(nullid):
1657 if len(n) != len(nullid):
1657 raise TypeError()
1658 raise TypeError()
1658 return n
1659 return n
1659 except TypeError:
1660 except TypeError:
1660 raise error.Abort('changeset references must be full hexadecimal '
1661 raise error.Abort('changeset references must be full hexadecimal '
1661 'node identifiers')
1662 'node identifiers')
1662
1663
1663 if opts.get('delete'):
1664 if opts.get('delete'):
1664 indices = []
1665 indices = []
1665 for v in opts.get('delete'):
1666 for v in opts.get('delete'):
1666 try:
1667 try:
1667 indices.append(int(v))
1668 indices.append(int(v))
1668 except ValueError:
1669 except ValueError:
1669 raise error.Abort(_('invalid index value: %r') % v,
1670 raise error.Abort(_('invalid index value: %r') % v,
1670 hint=_('use integers for indices'))
1671 hint=_('use integers for indices'))
1671
1672
1672 if repo.currenttransaction():
1673 if repo.currenttransaction():
1673 raise error.Abort(_('cannot delete obsmarkers in the middle '
1674 raise error.Abort(_('cannot delete obsmarkers in the middle '
1674 'of a transaction.'))
1675 'of a transaction.'))
1675
1676
1676 with repo.lock():
1677 with repo.lock():
1677 n = repair.deleteobsmarkers(repo.obsstore, indices)
1678 n = repair.deleteobsmarkers(repo.obsstore, indices)
1678 ui.write(_('deleted %i obsolescence markers\n') % n)
1679 ui.write(_('deleted %i obsolescence markers\n') % n)
1679
1680
1680 return
1681 return
1681
1682
1682 if precursor is not None:
1683 if precursor is not None:
1683 if opts['rev']:
1684 if opts['rev']:
1684 raise error.Abort('cannot select revision when creating marker')
1685 raise error.Abort('cannot select revision when creating marker')
1685 metadata = {}
1686 metadata = {}
1686 metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
1687 metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
1687 succs = tuple(parsenodeid(succ) for succ in successors)
1688 succs = tuple(parsenodeid(succ) for succ in successors)
1688 l = repo.lock()
1689 l = repo.lock()
1689 try:
1690 try:
1690 tr = repo.transaction('debugobsolete')
1691 tr = repo.transaction('debugobsolete')
1691 try:
1692 try:
1692 date = opts.get('date')
1693 date = opts.get('date')
1693 if date:
1694 if date:
1694 date = dateutil.parsedate(date)
1695 date = dateutil.parsedate(date)
1695 else:
1696 else:
1696 date = None
1697 date = None
1697 prec = parsenodeid(precursor)
1698 prec = parsenodeid(precursor)
1698 parents = None
1699 parents = None
1699 if opts['record_parents']:
1700 if opts['record_parents']:
1700 if prec not in repo.unfiltered():
1701 if prec not in repo.unfiltered():
1701 raise error.Abort('cannot use --record-parents on '
1702 raise error.Abort('cannot use --record-parents on '
1702 'unknown changesets')
1703 'unknown changesets')
1703 parents = repo.unfiltered()[prec].parents()
1704 parents = repo.unfiltered()[prec].parents()
1704 parents = tuple(p.node() for p in parents)
1705 parents = tuple(p.node() for p in parents)
1705 repo.obsstore.create(tr, prec, succs, opts['flags'],
1706 repo.obsstore.create(tr, prec, succs, opts['flags'],
1706 parents=parents, date=date,
1707 parents=parents, date=date,
1707 metadata=metadata, ui=ui)
1708 metadata=metadata, ui=ui)
1708 tr.close()
1709 tr.close()
1709 except ValueError as exc:
1710 except ValueError as exc:
1710 raise error.Abort(_('bad obsmarker input: %s') %
1711 raise error.Abort(_('bad obsmarker input: %s') %
1711 pycompat.bytestr(exc))
1712 pycompat.bytestr(exc))
1712 finally:
1713 finally:
1713 tr.release()
1714 tr.release()
1714 finally:
1715 finally:
1715 l.release()
1716 l.release()
1716 else:
1717 else:
1717 if opts['rev']:
1718 if opts['rev']:
1718 revs = scmutil.revrange(repo, opts['rev'])
1719 revs = scmutil.revrange(repo, opts['rev'])
1719 nodes = [repo[r].node() for r in revs]
1720 nodes = [repo[r].node() for r in revs]
1720 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1721 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1721 exclusive=opts['exclusive']))
1722 exclusive=opts['exclusive']))
1722 markers.sort(key=lambda x: x._data)
1723 markers.sort(key=lambda x: x._data)
1723 else:
1724 else:
1724 markers = obsutil.getmarkers(repo)
1725 markers = obsutil.getmarkers(repo)
1725
1726
1726 markerstoiter = markers
1727 markerstoiter = markers
1727 isrelevant = lambda m: True
1728 isrelevant = lambda m: True
1728 if opts.get('rev') and opts.get('index'):
1729 if opts.get('rev') and opts.get('index'):
1729 markerstoiter = obsutil.getmarkers(repo)
1730 markerstoiter = obsutil.getmarkers(repo)
1730 markerset = set(markers)
1731 markerset = set(markers)
1731 isrelevant = lambda m: m in markerset
1732 isrelevant = lambda m: m in markerset
1732
1733
1733 fm = ui.formatter('debugobsolete', opts)
1734 fm = ui.formatter('debugobsolete', opts)
1734 for i, m in enumerate(markerstoiter):
1735 for i, m in enumerate(markerstoiter):
1735 if not isrelevant(m):
1736 if not isrelevant(m):
1736 # marker can be irrelevant when we're iterating over a set
1737 # marker can be irrelevant when we're iterating over a set
1737 # of markers (markerstoiter) which is bigger than the set
1738 # of markers (markerstoiter) which is bigger than the set
1738 # of markers we want to display (markers)
1739 # of markers we want to display (markers)
1739 # this can happen if both --index and --rev options are
1740 # this can happen if both --index and --rev options are
1740 # provided and thus we need to iterate over all of the markers
1741 # provided and thus we need to iterate over all of the markers
1741 # to get the correct indices, but only display the ones that
1742 # to get the correct indices, but only display the ones that
1742 # are relevant to the --rev value
1743 # are relevant to the --rev value
1743 continue
1744 continue
1744 fm.startitem()
1745 fm.startitem()
1745 ind = i if opts.get('index') else None
1746 ind = i if opts.get('index') else None
1746 cmdutil.showmarker(fm, m, index=ind)
1747 cmdutil.showmarker(fm, m, index=ind)
1747 fm.end()
1748 fm.end()
1748
1749
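The display loop above iterates over markerstoiter rather than the filtered list so that the printed indices stay stable when --rev and --index are combined. A minimal standalone sketch of that pattern, using hypothetical stand-in values instead of a real obsstore:

    # Stand-ins for obsutil.getmarkers(repo) and the --rev selection.
    allmarkers = ['m0', 'm1', 'm2', 'm3', 'm4']
    relevant = {'m1', 'm3'}

    for i, marker in enumerate(allmarkers):
        if marker not in relevant:
            continue                        # skipped, but still counted for the index
        print('%d: %s' % (i, marker))       # prints "1: m1" then "3: m3"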
1749 @command('debugp1copies',
1750 @command('debugp1copies',
1750 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1751 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1751 _('[-r REV]'))
1752 _('[-r REV]'))
1752 def debugp1copies(ui, repo, **opts):
1753 def debugp1copies(ui, repo, **opts):
1753 """dump copy information compared to p1"""
1754 """dump copy information compared to p1"""
1754
1755
1755 opts = pycompat.byteskwargs(opts)
1756 opts = pycompat.byteskwargs(opts)
1756 ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
1757 ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
1757 for dst, src in ctx.p1copies().items():
1758 for dst, src in ctx.p1copies().items():
1758 ui.write('%s -> %s\n' % (src, dst))
1759 ui.write('%s -> %s\n' % (src, dst))
1759
1760
1760 @command('debugp2copies',
1761 @command('debugp2copies',
1761 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1762 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1762 _('[-r REV]'))
1763 _('[-r REV]'))
1763 def debugp2copies(ui, repo, **opts):
1764 def debugp2copies(ui, repo, **opts):
1764 """dump copy information compared to p2"""
1765 """dump copy information compared to p2"""
1765
1766
1766 opts = pycompat.byteskwargs(opts)
1767 opts = pycompat.byteskwargs(opts)
1767 ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
1768 ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
1768 for dst, src in ctx.p2copies().items():
1769 for dst, src in ctx.p2copies().items():
1769 ui.write('%s -> %s\n' % (src, dst))
1770 ui.write('%s -> %s\n' % (src, dst))
1770
1771
1771 @command('debugpathcomplete',
1772 @command('debugpathcomplete',
1772 [('f', 'full', None, _('complete an entire path')),
1773 [('f', 'full', None, _('complete an entire path')),
1773 ('n', 'normal', None, _('show only normal files')),
1774 ('n', 'normal', None, _('show only normal files')),
1774 ('a', 'added', None, _('show only added files')),
1775 ('a', 'added', None, _('show only added files')),
1775 ('r', 'removed', None, _('show only removed files'))],
1776 ('r', 'removed', None, _('show only removed files'))],
1776 _('FILESPEC...'))
1777 _('FILESPEC...'))
1777 def debugpathcomplete(ui, repo, *specs, **opts):
1778 def debugpathcomplete(ui, repo, *specs, **opts):
1778 '''complete part or all of a tracked path
1779 '''complete part or all of a tracked path
1779
1780
1780 This command supports shells that offer path name completion. It
1781 This command supports shells that offer path name completion. It
1781 currently completes only files already known to the dirstate.
1782 currently completes only files already known to the dirstate.
1782
1783
1783 Completion extends only to the next path segment unless
1784 Completion extends only to the next path segment unless
1784 --full is specified, in which case entire paths are used.'''
1785 --full is specified, in which case entire paths are used.'''
1785
1786
1786 def complete(path, acceptable):
1787 def complete(path, acceptable):
1787 dirstate = repo.dirstate
1788 dirstate = repo.dirstate
1788 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
1789 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
1789 rootdir = repo.root + pycompat.ossep
1790 rootdir = repo.root + pycompat.ossep
1790 if spec != repo.root and not spec.startswith(rootdir):
1791 if spec != repo.root and not spec.startswith(rootdir):
1791 return [], []
1792 return [], []
1792 if os.path.isdir(spec):
1793 if os.path.isdir(spec):
1793 spec += '/'
1794 spec += '/'
1794 spec = spec[len(rootdir):]
1795 spec = spec[len(rootdir):]
1795 fixpaths = pycompat.ossep != '/'
1796 fixpaths = pycompat.ossep != '/'
1796 if fixpaths:
1797 if fixpaths:
1797 spec = spec.replace(pycompat.ossep, '/')
1798 spec = spec.replace(pycompat.ossep, '/')
1798 speclen = len(spec)
1799 speclen = len(spec)
1799 fullpaths = opts[r'full']
1800 fullpaths = opts[r'full']
1800 files, dirs = set(), set()
1801 files, dirs = set(), set()
1801 adddir, addfile = dirs.add, files.add
1802 adddir, addfile = dirs.add, files.add
1802 for f, st in dirstate.iteritems():
1803 for f, st in dirstate.iteritems():
1803 if f.startswith(spec) and st[0] in acceptable:
1804 if f.startswith(spec) and st[0] in acceptable:
1804 if fixpaths:
1805 if fixpaths:
1805 f = f.replace('/', pycompat.ossep)
1806 f = f.replace('/', pycompat.ossep)
1806 if fullpaths:
1807 if fullpaths:
1807 addfile(f)
1808 addfile(f)
1808 continue
1809 continue
1809 s = f.find(pycompat.ossep, speclen)
1810 s = f.find(pycompat.ossep, speclen)
1810 if s >= 0:
1811 if s >= 0:
1811 adddir(f[:s])
1812 adddir(f[:s])
1812 else:
1813 else:
1813 addfile(f)
1814 addfile(f)
1814 return files, dirs
1815 return files, dirs
1815
1816
1816 acceptable = ''
1817 acceptable = ''
1817 if opts[r'normal']:
1818 if opts[r'normal']:
1818 acceptable += 'nm'
1819 acceptable += 'nm'
1819 if opts[r'added']:
1820 if opts[r'added']:
1820 acceptable += 'a'
1821 acceptable += 'a'
1821 if opts[r'removed']:
1822 if opts[r'removed']:
1822 acceptable += 'r'
1823 acceptable += 'r'
1823 cwd = repo.getcwd()
1824 cwd = repo.getcwd()
1824 if not specs:
1825 if not specs:
1825 specs = ['.']
1826 specs = ['.']
1826
1827
1827 files, dirs = set(), set()
1828 files, dirs = set(), set()
1828 for spec in specs:
1829 for spec in specs:
1829 f, d = complete(spec, acceptable or 'nmar')
1830 f, d = complete(spec, acceptable or 'nmar')
1830 files.update(f)
1831 files.update(f)
1831 dirs.update(d)
1832 dirs.update(d)
1832 files.update(dirs)
1833 files.update(dirs)
1833 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1834 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1834 ui.write('\n')
1835 ui.write('\n')
1835
1836
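The complete() helper above filters dirstate entries by their one-letter state ('n' normal, 'm' merged, 'a' added, 'r' removed) using the acceptable string assembled from the -n/-a/-r flags. A self-contained sketch of that filter with a made-up mapping standing in for the real dirstate (which is not a plain dict; only the st[0] check is mirrored here):

    # path -> (state, mode, size, mtime), as iterated by dirstate.iteritems()
    fakedirstate = {
        'README': ('n', 0o644, 12, 0),
        'new.py': ('a', 0o644, 34, 0),
        'gone.c': ('r', 0, 0, 0),
    }
    acceptable = 'nm' + 'a'    # e.g. --normal and --added were given
    matches = [f for f, st in fakedirstate.items() if st[0] in acceptable]
    print(sorted(matches))     # ['README', 'new.py']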
1836 @command('debugpathcopies',
1837 @command('debugpathcopies',
1837 cmdutil.walkopts,
1838 cmdutil.walkopts,
1838 'hg debugpathcopies REV1 REV2 [FILE]',
1839 'hg debugpathcopies REV1 REV2 [FILE]',
1839 inferrepo=True)
1840 inferrepo=True)
1840 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
1841 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
1841 """show copies between two revisions"""
1842 """show copies between two revisions"""
1842 ctx1 = scmutil.revsingle(repo, rev1)
1843 ctx1 = scmutil.revsingle(repo, rev1)
1843 ctx2 = scmutil.revsingle(repo, rev2)
1844 ctx2 = scmutil.revsingle(repo, rev2)
1844 m = scmutil.match(ctx1, pats, opts)
1845 m = scmutil.match(ctx1, pats, opts)
1845 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
1846 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
1846 ui.write('%s -> %s\n' % (src, dst))
1847 ui.write('%s -> %s\n' % (src, dst))
1847
1848
1848 @command('debugpeer', [], _('PATH'), norepo=True)
1849 @command('debugpeer', [], _('PATH'), norepo=True)
1849 def debugpeer(ui, path):
1850 def debugpeer(ui, path):
1850 """establish a connection to a peer repository"""
1851 """establish a connection to a peer repository"""
1851 # Always enable peer request logging. Requires --debug to display
1852 # Always enable peer request logging. Requires --debug to display
1852 # though.
1853 # though.
1853 overrides = {
1854 overrides = {
1854 ('devel', 'debug.peer-request'): True,
1855 ('devel', 'debug.peer-request'): True,
1855 }
1856 }
1856
1857
1857 with ui.configoverride(overrides):
1858 with ui.configoverride(overrides):
1858 peer = hg.peer(ui, {}, path)
1859 peer = hg.peer(ui, {}, path)
1859
1860
1860 local = peer.local() is not None
1861 local = peer.local() is not None
1861 canpush = peer.canpush()
1862 canpush = peer.canpush()
1862
1863
1863 ui.write(_('url: %s\n') % peer.url())
1864 ui.write(_('url: %s\n') % peer.url())
1864 ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
1865 ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
1865 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
1866 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
1866
1867
1867 @command('debugpickmergetool',
1868 @command('debugpickmergetool',
1868 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1869 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1869 ('', 'changedelete', None, _('emulate merging change and delete')),
1870 ('', 'changedelete', None, _('emulate merging change and delete')),
1870 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1871 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1871 _('[PATTERN]...'),
1872 _('[PATTERN]...'),
1872 inferrepo=True)
1873 inferrepo=True)
1873 def debugpickmergetool(ui, repo, *pats, **opts):
1874 def debugpickmergetool(ui, repo, *pats, **opts):
1874 """examine which merge tool is chosen for the specified file
1875 """examine which merge tool is chosen for the specified file
1875
1876
1876 As described in :hg:`help merge-tools`, Mercurial examines the
1877 As described in :hg:`help merge-tools`, Mercurial examines the
1877 configurations below in this order to decide which merge tool is
1878 configurations below in this order to decide which merge tool is
1878 chosen for the specified file.
1879 chosen for the specified file.
1879
1880
1880 1. ``--tool`` option
1881 1. ``--tool`` option
1881 2. ``HGMERGE`` environment variable
1882 2. ``HGMERGE`` environment variable
1882 3. configurations in ``merge-patterns`` section
1883 3. configurations in ``merge-patterns`` section
1883 4. configuration of ``ui.merge``
1884 4. configuration of ``ui.merge``
1884 5. configurations in ``merge-tools`` section
1885 5. configurations in ``merge-tools`` section
1885 6. ``hgmerge`` tool (for historical reasons only)
1886 6. ``hgmerge`` tool (for historical reasons only)
1886 7. default tool for fallback (``:merge`` or ``:prompt``)
1887 7. default tool for fallback (``:merge`` or ``:prompt``)
1887
1888
1888 This command writes out the examination result in the style below::
1889 This command writes out the examination result in the style below::
1889
1890
1890 FILE = MERGETOOL
1891 FILE = MERGETOOL
1891
1892
1892 By default, all files known in the first parent context of the
1893 By default, all files known in the first parent context of the
1893 working directory are examined. Use file patterns and/or -I/-X
1894 working directory are examined. Use file patterns and/or -I/-X
1894 options to limit target files. -r/--rev is also useful to examine
1895 options to limit target files. -r/--rev is also useful to examine
1895 files in another context without actually updating to it.
1896 files in another context without actually updating to it.
1896
1897
1897 With --debug, this command shows warning messages while matching
1898 With --debug, this command shows warning messages while matching
1898 against ``merge-patterns`` and so on. It is recommended to
1899 against ``merge-patterns`` and so on. It is recommended to
1899 use this option with explicit file patterns and/or -I/-X options,
1900 use this option with explicit file patterns and/or -I/-X options,
1900 because this option increases the amount of output per file according
1901 because this option increases the amount of output per file according
1901 to the configurations in hgrc.
1902 to the configurations in hgrc.
1902
1903
1903 With -v/--verbose, this command first shows the configurations
1904 With -v/--verbose, this command first shows the configurations
1904 below (only if they are specified).
1905 below (only if they are specified).
1905
1906
1906 - ``--tool`` option
1907 - ``--tool`` option
1907 - ``HGMERGE`` environment variable
1908 - ``HGMERGE`` environment variable
1908 - configuration of ``ui.merge``
1909 - configuration of ``ui.merge``
1909
1910
1910 If a merge tool is chosen before matching against
1911 If a merge tool is chosen before matching against
1911 ``merge-patterns``, this command can't show any helpful
1912 ``merge-patterns``, this command can't show any helpful
1912 information, even with --debug. In such a case, the information above
1913 information, even with --debug. In such a case, the information above
1913 is useful for knowing why a merge tool was chosen.
1914 is useful for knowing why a merge tool was chosen.
1914 """
1915 """
1915 opts = pycompat.byteskwargs(opts)
1916 opts = pycompat.byteskwargs(opts)
1916 overrides = {}
1917 overrides = {}
1917 if opts['tool']:
1918 if opts['tool']:
1918 overrides[('ui', 'forcemerge')] = opts['tool']
1919 overrides[('ui', 'forcemerge')] = opts['tool']
1919 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1920 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1920
1921
1921 with ui.configoverride(overrides, 'debugmergepatterns'):
1922 with ui.configoverride(overrides, 'debugmergepatterns'):
1922 hgmerge = encoding.environ.get("HGMERGE")
1923 hgmerge = encoding.environ.get("HGMERGE")
1923 if hgmerge is not None:
1924 if hgmerge is not None:
1924 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1925 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1925 uimerge = ui.config("ui", "merge")
1926 uimerge = ui.config("ui", "merge")
1926 if uimerge:
1927 if uimerge:
1927 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1928 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1928
1929
1929 ctx = scmutil.revsingle(repo, opts.get('rev'))
1930 ctx = scmutil.revsingle(repo, opts.get('rev'))
1930 m = scmutil.match(ctx, pats, opts)
1931 m = scmutil.match(ctx, pats, opts)
1931 changedelete = opts['changedelete']
1932 changedelete = opts['changedelete']
1932 for path in ctx.walk(m):
1933 for path in ctx.walk(m):
1933 fctx = ctx[path]
1934 fctx = ctx[path]
1934 try:
1935 try:
1935 if not ui.debugflag:
1936 if not ui.debugflag:
1936 ui.pushbuffer(error=True)
1937 ui.pushbuffer(error=True)
1937 tool, toolpath = filemerge._picktool(repo, ui, path,
1938 tool, toolpath = filemerge._picktool(repo, ui, path,
1938 fctx.isbinary(),
1939 fctx.isbinary(),
1939 'l' in fctx.flags(),
1940 'l' in fctx.flags(),
1940 changedelete)
1941 changedelete)
1941 finally:
1942 finally:
1942 if not ui.debugflag:
1943 if not ui.debugflag:
1943 ui.popbuffer()
1944 ui.popbuffer()
1944 ui.write(('%s = %s\n') % (path, tool))
1945 ui.write(('%s = %s\n') % (path, tool))
1945
1946
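The seven-step priority order in the docstring above boils down to "return the first configured tool, else fall back". A condensed illustration of that chain (a simplification for this note, not the real filemerge._picktool logic; steps 3, 5 and 6 are collapsed into single arguments):

    def pick_tool(cli_tool=None, hgmerge_env=None, pattern_tool=None,
                  ui_merge=None, tools_config=None):
        for candidate in (cli_tool,       # 1. --tool option
                          hgmerge_env,    # 2. HGMERGE environment variable
                          pattern_tool,   # 3. merge-patterns match
                          ui_merge,       # 4. ui.merge
                          tools_config):  # 5./6. merge-tools section, hgmerge
            if candidate:
                return candidate
        return ':merge'                   # 7. fallback (':merge' or ':prompt')

    print(pick_tool(hgmerge_env='vimdiff'))   # 'vimdiff'
    print(pick_tool())                        # ':merge'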
1946 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1947 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1947 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1948 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1948 '''access the pushkey key/value protocol
1949 '''access the pushkey key/value protocol
1949
1950
1950 With two args, list the keys in the given namespace.
1951 With two args, list the keys in the given namespace.
1951
1952
1952 With five args, set a key to new if it currently is set to old.
1953 With five args, set a key to new if it currently is set to old.
1953 Reports success or failure.
1954 Reports success or failure.
1954 '''
1955 '''
1955
1956
1956 target = hg.peer(ui, {}, repopath)
1957 target = hg.peer(ui, {}, repopath)
1957 if keyinfo:
1958 if keyinfo:
1958 key, old, new = keyinfo
1959 key, old, new = keyinfo
1959 with target.commandexecutor() as e:
1960 with target.commandexecutor() as e:
1960 r = e.callcommand('pushkey', {
1961 r = e.callcommand('pushkey', {
1961 'namespace': namespace,
1962 'namespace': namespace,
1962 'key': key,
1963 'key': key,
1963 'old': old,
1964 'old': old,
1964 'new': new,
1965 'new': new,
1965 }).result()
1966 }).result()
1966
1967
1967 ui.status(pycompat.bytestr(r) + '\n')
1968 ui.status(pycompat.bytestr(r) + '\n')
1968 return not r
1969 return not r
1969 else:
1970 else:
1970 for k, v in sorted(target.listkeys(namespace).iteritems()):
1971 for k, v in sorted(target.listkeys(namespace).iteritems()):
1971 ui.write("%s\t%s\n" % (stringutil.escapestr(k),
1972 ui.write("%s\t%s\n" % (stringutil.escapestr(k),
1972 stringutil.escapestr(v)))
1973 stringutil.escapestr(v)))
1973
1974
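The five-argument form described in the docstring above has compare-and-swap semantics: the key is moved to the new value only if it currently holds the old one, and success or failure is reported. A toy sketch of that behaviour with an in-memory store (illustrative only, not Mercurial's pushkey implementation):

    def pushkey(store, namespace, key, old, new):
        ns = store.setdefault(namespace, {})
        if ns.get(key, '') != old:
            return False          # current value does not match `old`
        ns[key] = new
        return True

    store = {'bookmarks': {'featureX': 'aaa'}}
    print(pushkey(store, 'bookmarks', 'featureX', 'aaa', 'bbb'))  # True
    print(pushkey(store, 'bookmarks', 'featureX', 'aaa', 'ccc'))  # False: old no longer matches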
1974 @command('debugpvec', [], _('A B'))
1975 @command('debugpvec', [], _('A B'))
1975 def debugpvec(ui, repo, a, b=None):
1976 def debugpvec(ui, repo, a, b=None):
1976 ca = scmutil.revsingle(repo, a)
1977 ca = scmutil.revsingle(repo, a)
1977 cb = scmutil.revsingle(repo, b)
1978 cb = scmutil.revsingle(repo, b)
1978 pa = pvec.ctxpvec(ca)
1979 pa = pvec.ctxpvec(ca)
1979 pb = pvec.ctxpvec(cb)
1980 pb = pvec.ctxpvec(cb)
1980 if pa == pb:
1981 if pa == pb:
1981 rel = "="
1982 rel = "="
1982 elif pa > pb:
1983 elif pa > pb:
1983 rel = ">"
1984 rel = ">"
1984 elif pa < pb:
1985 elif pa < pb:
1985 rel = "<"
1986 rel = "<"
1986 elif pa | pb:
1987 elif pa | pb:
1987 rel = "|"
1988 rel = "|"
1988 ui.write(_("a: %s\n") % pa)
1989 ui.write(_("a: %s\n") % pa)
1989 ui.write(_("b: %s\n") % pb)
1990 ui.write(_("b: %s\n") % pb)
1990 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1991 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1991 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1992 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1992 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1993 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1993 pa.distance(pb), rel))
1994 pa.distance(pb), rel))
1994
1995
1995 @command('debugrebuilddirstate|debugrebuildstate',
1996 @command('debugrebuilddirstate|debugrebuildstate',
1996 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1997 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1997 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1998 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1998 'the working copy parent')),
1999 'the working copy parent')),
1999 ],
2000 ],
2000 _('[-r REV]'))
2001 _('[-r REV]'))
2001 def debugrebuilddirstate(ui, repo, rev, **opts):
2002 def debugrebuilddirstate(ui, repo, rev, **opts):
2002 """rebuild the dirstate as it would look for the given revision
2003 """rebuild the dirstate as it would look for the given revision
2003
2004
2004 If no revision is specified, the working directory's first parent will be used.
2005 If no revision is specified, the working directory's first parent will be used.
2005
2006
2006 The dirstate will be set to the files of the given revision.
2007 The dirstate will be set to the files of the given revision.
2007 The actual working directory content or existing dirstate
2008 The actual working directory content or existing dirstate
2008 information such as adds or removes is not considered.
2009 information such as adds or removes is not considered.
2009
2010
2010 ``minimal`` will only rebuild the dirstate status for files that claim to be
2011 ``minimal`` will only rebuild the dirstate status for files that claim to be
2011 tracked but are not in the parent manifest, or that exist in the parent
2012 tracked but are not in the parent manifest, or that exist in the parent
2012 manifest but are not in the dirstate. It will not change adds, removes, or
2013 manifest but are not in the dirstate. It will not change adds, removes, or
2013 modified files that are in the working copy parent.
2014 modified files that are in the working copy parent.
2014
2015
2015 One use of this command is to make the next :hg:`status` invocation
2016 One use of this command is to make the next :hg:`status` invocation
2016 check the actual file content.
2017 check the actual file content.
2017 """
2018 """
2018 ctx = scmutil.revsingle(repo, rev)
2019 ctx = scmutil.revsingle(repo, rev)
2019 with repo.wlock():
2020 with repo.wlock():
2020 dirstate = repo.dirstate
2021 dirstate = repo.dirstate
2021 changedfiles = None
2022 changedfiles = None
2022 # See command doc for what minimal does.
2023 # See command doc for what minimal does.
2023 if opts.get(r'minimal'):
2024 if opts.get(r'minimal'):
2024 manifestfiles = set(ctx.manifest().keys())
2025 manifestfiles = set(ctx.manifest().keys())
2025 dirstatefiles = set(dirstate)
2026 dirstatefiles = set(dirstate)
2026 manifestonly = manifestfiles - dirstatefiles
2027 manifestonly = manifestfiles - dirstatefiles
2027 dsonly = dirstatefiles - manifestfiles
2028 dsonly = dirstatefiles - manifestfiles
2028 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
2029 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
2029 changedfiles = manifestonly | dsnotadded
2030 changedfiles = manifestonly | dsnotadded
2030
2031
2031 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2032 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2032
2033
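With --minimal, the rebuild above is limited to manifest/dirstate mismatches, excluding files the dirstate already records as added. The same set arithmetic, shown with hypothetical file names:

    manifestfiles = {'a.txt', 'b.txt', 'c.txt'}              # files in the target manifest
    dirstatefiles = {'b.txt', 'c.txt', 'new.txt', 'x.txt'}   # files the dirstate tracks
    added = {'new.txt'}                                      # entries in state 'a'

    manifestonly = manifestfiles - dirstatefiles             # in manifest, missing from dirstate
    dsnotadded = (dirstatefiles - manifestfiles) - added     # dirstate-only, but not adds
    changedfiles = manifestonly | dsnotadded
    print(sorted(changedfiles))                              # ['a.txt', 'x.txt']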
2033 @command('debugrebuildfncache', [], '')
2034 @command('debugrebuildfncache', [], '')
2034 def debugrebuildfncache(ui, repo):
2035 def debugrebuildfncache(ui, repo):
2035 """rebuild the fncache file"""
2036 """rebuild the fncache file"""
2036 repair.rebuildfncache(ui, repo)
2037 repair.rebuildfncache(ui, repo)
2037
2038
2038 @command('debugrename',
2039 @command('debugrename',
2039 [('r', 'rev', '', _('revision to debug'), _('REV'))],
2040 [('r', 'rev', '', _('revision to debug'), _('REV'))],
2040 _('[-r REV] [FILE]...'))
2041 _('[-r REV] [FILE]...'))
2041 def debugrename(ui, repo, *pats, **opts):
2042 def debugrename(ui, repo, *pats, **opts):
2042 """dump rename information"""
2043 """dump rename information"""
2043
2044
2044 opts = pycompat.byteskwargs(opts)
2045 opts = pycompat.byteskwargs(opts)
2045 ctx = scmutil.revsingle(repo, opts.get('rev'))
2046 ctx = scmutil.revsingle(repo, opts.get('rev'))
2046 m = scmutil.match(ctx, pats, opts)
2047 m = scmutil.match(ctx, pats, opts)
2047 for abs in ctx.walk(m):
2048 for abs in ctx.walk(m):
2048 fctx = ctx[abs]
2049 fctx = ctx[abs]
2049 o = fctx.filelog().renamed(fctx.filenode())
2050 o = fctx.filelog().renamed(fctx.filenode())
2050 rel = repo.pathto(abs)
2051 rel = repo.pathto(abs)
2051 if o:
2052 if o:
2052 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2053 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2053 else:
2054 else:
2054 ui.write(_("%s not renamed\n") % rel)
2055 ui.write(_("%s not renamed\n") % rel)
2055
2056
2056 @command('debugrevlog', cmdutil.debugrevlogopts +
2057 @command('debugrevlog', cmdutil.debugrevlogopts +
2057 [('d', 'dump', False, _('dump index data'))],
2058 [('d', 'dump', False, _('dump index data'))],
2058 _('-c|-m|FILE'),
2059 _('-c|-m|FILE'),
2059 optionalrepo=True)
2060 optionalrepo=True)
2060 def debugrevlog(ui, repo, file_=None, **opts):
2061 def debugrevlog(ui, repo, file_=None, **opts):
2061 """show data and statistics about a revlog"""
2062 """show data and statistics about a revlog"""
2062 opts = pycompat.byteskwargs(opts)
2063 opts = pycompat.byteskwargs(opts)
2063 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
2064 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
2064
2065
2065 if opts.get("dump"):
2066 if opts.get("dump"):
2066 numrevs = len(r)
2067 numrevs = len(r)
2067 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
2068 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
2068 " rawsize totalsize compression heads chainlen\n"))
2069 " rawsize totalsize compression heads chainlen\n"))
2069 ts = 0
2070 ts = 0
2070 heads = set()
2071 heads = set()
2071
2072
2072 for rev in pycompat.xrange(numrevs):
2073 for rev in pycompat.xrange(numrevs):
2073 dbase = r.deltaparent(rev)
2074 dbase = r.deltaparent(rev)
2074 if dbase == -1:
2075 if dbase == -1:
2075 dbase = rev
2076 dbase = rev
2076 cbase = r.chainbase(rev)
2077 cbase = r.chainbase(rev)
2077 clen = r.chainlen(rev)
2078 clen = r.chainlen(rev)
2078 p1, p2 = r.parentrevs(rev)
2079 p1, p2 = r.parentrevs(rev)
2079 rs = r.rawsize(rev)
2080 rs = r.rawsize(rev)
2080 ts = ts + rs
2081 ts = ts + rs
2081 heads -= set(r.parentrevs(rev))
2082 heads -= set(r.parentrevs(rev))
2082 heads.add(rev)
2083 heads.add(rev)
2083 try:
2084 try:
2084 compression = ts / r.end(rev)
2085 compression = ts / r.end(rev)
2085 except ZeroDivisionError:
2086 except ZeroDivisionError:
2086 compression = 0
2087 compression = 0
2087 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2088 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2088 "%11d %5d %8d\n" %
2089 "%11d %5d %8d\n" %
2089 (rev, p1, p2, r.start(rev), r.end(rev),
2090 (rev, p1, p2, r.start(rev), r.end(rev),
2090 r.start(dbase), r.start(cbase),
2091 r.start(dbase), r.start(cbase),
2091 r.start(p1), r.start(p2),
2092 r.start(p1), r.start(p2),
2092 rs, ts, compression, len(heads), clen))
2093 rs, ts, compression, len(heads), clen))
2093 return 0
2094 return 0
2094
2095
2095 v = r.version
2096 v = r.version
2096 format = v & 0xFFFF
2097 format = v & 0xFFFF
2097 flags = []
2098 flags = []
2098 gdelta = False
2099 gdelta = False
2099 if v & revlog.FLAG_INLINE_DATA:
2100 if v & revlog.FLAG_INLINE_DATA:
2100 flags.append('inline')
2101 flags.append('inline')
2101 if v & revlog.FLAG_GENERALDELTA:
2102 if v & revlog.FLAG_GENERALDELTA:
2102 gdelta = True
2103 gdelta = True
2103 flags.append('generaldelta')
2104 flags.append('generaldelta')
2104 if not flags:
2105 if not flags:
2105 flags = ['(none)']
2106 flags = ['(none)']
2106
2107
2107 ### tracks merge vs single parent
2108 ### tracks merge vs single parent
2108 nummerges = 0
2109 nummerges = 0
2109
2110
2110 ### tracks the ways the deltas are built
2111 ### tracks the ways the deltas are built
2111 # nodelta
2112 # nodelta
2112 numempty = 0
2113 numempty = 0
2113 numemptytext = 0
2114 numemptytext = 0
2114 numemptydelta = 0
2115 numemptydelta = 0
2115 # full file content
2116 # full file content
2116 numfull = 0
2117 numfull = 0
2117 # intermediate snapshot against a prior snapshot
2118 # intermediate snapshot against a prior snapshot
2118 numsemi = 0
2119 numsemi = 0
2119 # snapshot count per depth
2120 # snapshot count per depth
2120 numsnapdepth = collections.defaultdict(lambda: 0)
2121 numsnapdepth = collections.defaultdict(lambda: 0)
2121 # delta against previous revision
2122 # delta against previous revision
2122 numprev = 0
2123 numprev = 0
2123 # delta against first or second parent (not prev)
2124 # delta against first or second parent (not prev)
2124 nump1 = 0
2125 nump1 = 0
2125 nump2 = 0
2126 nump2 = 0
2126 # delta against neither prev nor parents
2127 # delta against neither prev nor parents
2127 numother = 0
2128 numother = 0
2128 # deltas against prev that are also first or second parent
2129 # deltas against prev that are also first or second parent
2129 # (details of `numprev`)
2130 # (details of `numprev`)
2130 nump1prev = 0
2131 nump1prev = 0
2131 nump2prev = 0
2132 nump2prev = 0
2132
2133
2133 # data about the delta chain of each rev
2134 # data about the delta chain of each rev
2134 chainlengths = []
2135 chainlengths = []
2135 chainbases = []
2136 chainbases = []
2136 chainspans = []
2137 chainspans = []
2137
2138
2138 # data about each revision
2139 # data about each revision
2139 datasize = [None, 0, 0]
2140 datasize = [None, 0, 0]
2140 fullsize = [None, 0, 0]
2141 fullsize = [None, 0, 0]
2141 semisize = [None, 0, 0]
2142 semisize = [None, 0, 0]
2142 # snapshot count per depth
2143 # snapshot count per depth
2143 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2144 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2144 deltasize = [None, 0, 0]
2145 deltasize = [None, 0, 0]
2145 chunktypecounts = {}
2146 chunktypecounts = {}
2146 chunktypesizes = {}
2147 chunktypesizes = {}
2147
2148
2148 def addsize(size, l):
2149 def addsize(size, l):
2149 if l[0] is None or size < l[0]:
2150 if l[0] is None or size < l[0]:
2150 l[0] = size
2151 l[0] = size
2151 if size > l[1]:
2152 if size > l[1]:
2152 l[1] = size
2153 l[1] = size
2153 l[2] += size
2154 l[2] += size
2154
2155
2155 numrevs = len(r)
2156 numrevs = len(r)
2156 for rev in pycompat.xrange(numrevs):
2157 for rev in pycompat.xrange(numrevs):
2157 p1, p2 = r.parentrevs(rev)
2158 p1, p2 = r.parentrevs(rev)
2158 delta = r.deltaparent(rev)
2159 delta = r.deltaparent(rev)
2159 if format > 0:
2160 if format > 0:
2160 addsize(r.rawsize(rev), datasize)
2161 addsize(r.rawsize(rev), datasize)
2161 if p2 != nullrev:
2162 if p2 != nullrev:
2162 nummerges += 1
2163 nummerges += 1
2163 size = r.length(rev)
2164 size = r.length(rev)
2164 if delta == nullrev:
2165 if delta == nullrev:
2165 chainlengths.append(0)
2166 chainlengths.append(0)
2166 chainbases.append(r.start(rev))
2167 chainbases.append(r.start(rev))
2167 chainspans.append(size)
2168 chainspans.append(size)
2168 if size == 0:
2169 if size == 0:
2169 numempty += 1
2170 numempty += 1
2170 numemptytext += 1
2171 numemptytext += 1
2171 else:
2172 else:
2172 numfull += 1
2173 numfull += 1
2173 numsnapdepth[0] += 1
2174 numsnapdepth[0] += 1
2174 addsize(size, fullsize)
2175 addsize(size, fullsize)
2175 addsize(size, snapsizedepth[0])
2176 addsize(size, snapsizedepth[0])
2176 else:
2177 else:
2177 chainlengths.append(chainlengths[delta] + 1)
2178 chainlengths.append(chainlengths[delta] + 1)
2178 baseaddr = chainbases[delta]
2179 baseaddr = chainbases[delta]
2179 revaddr = r.start(rev)
2180 revaddr = r.start(rev)
2180 chainbases.append(baseaddr)
2181 chainbases.append(baseaddr)
2181 chainspans.append((revaddr - baseaddr) + size)
2182 chainspans.append((revaddr - baseaddr) + size)
2182 if size == 0:
2183 if size == 0:
2183 numempty += 1
2184 numempty += 1
2184 numemptydelta += 1
2185 numemptydelta += 1
2185 elif r.issnapshot(rev):
2186 elif r.issnapshot(rev):
2186 addsize(size, semisize)
2187 addsize(size, semisize)
2187 numsemi += 1
2188 numsemi += 1
2188 depth = r.snapshotdepth(rev)
2189 depth = r.snapshotdepth(rev)
2189 numsnapdepth[depth] += 1
2190 numsnapdepth[depth] += 1
2190 addsize(size, snapsizedepth[depth])
2191 addsize(size, snapsizedepth[depth])
2191 else:
2192 else:
2192 addsize(size, deltasize)
2193 addsize(size, deltasize)
2193 if delta == rev - 1:
2194 if delta == rev - 1:
2194 numprev += 1
2195 numprev += 1
2195 if delta == p1:
2196 if delta == p1:
2196 nump1prev += 1
2197 nump1prev += 1
2197 elif delta == p2:
2198 elif delta == p2:
2198 nump2prev += 1
2199 nump2prev += 1
2199 elif delta == p1:
2200 elif delta == p1:
2200 nump1 += 1
2201 nump1 += 1
2201 elif delta == p2:
2202 elif delta == p2:
2202 nump2 += 1
2203 nump2 += 1
2203 elif delta != nullrev:
2204 elif delta != nullrev:
2204 numother += 1
2205 numother += 1
2205
2206
2206 # Obtain data on the raw chunks in the revlog.
2207 # Obtain data on the raw chunks in the revlog.
2207 if util.safehasattr(r, '_getsegmentforrevs'):
2208 if util.safehasattr(r, '_getsegmentforrevs'):
2208 segment = r._getsegmentforrevs(rev, rev)[1]
2209 segment = r._getsegmentforrevs(rev, rev)[1]
2209 else:
2210 else:
2210 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2211 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2211 if segment:
2212 if segment:
2212 chunktype = bytes(segment[0:1])
2213 chunktype = bytes(segment[0:1])
2213 else:
2214 else:
2214 chunktype = 'empty'
2215 chunktype = 'empty'
2215
2216
2216 if chunktype not in chunktypecounts:
2217 if chunktype not in chunktypecounts:
2217 chunktypecounts[chunktype] = 0
2218 chunktypecounts[chunktype] = 0
2218 chunktypesizes[chunktype] = 0
2219 chunktypesizes[chunktype] = 0
2219
2220
2220 chunktypecounts[chunktype] += 1
2221 chunktypecounts[chunktype] += 1
2221 chunktypesizes[chunktype] += size
2222 chunktypesizes[chunktype] += size
2222
2223
2223 # Adjust size min value for empty cases
2224 # Adjust size min value for empty cases
2224 for size in (datasize, fullsize, semisize, deltasize):
2225 for size in (datasize, fullsize, semisize, deltasize):
2225 if size[0] is None:
2226 if size[0] is None:
2226 size[0] = 0
2227 size[0] = 0
2227
2228
2228 numdeltas = numrevs - numfull - numempty - numsemi
2229 numdeltas = numrevs - numfull - numempty - numsemi
2229 numoprev = numprev - nump1prev - nump2prev
2230 numoprev = numprev - nump1prev - nump2prev
2230 totalrawsize = datasize[2]
2231 totalrawsize = datasize[2]
2231 datasize[2] /= numrevs
2232 datasize[2] /= numrevs
2232 fulltotal = fullsize[2]
2233 fulltotal = fullsize[2]
2233 fullsize[2] /= numfull
2234 fullsize[2] /= numfull
2234 semitotal = semisize[2]
2235 semitotal = semisize[2]
2235 snaptotal = {}
2236 snaptotal = {}
2236 if numsemi > 0:
2237 if numsemi > 0:
2237 semisize[2] /= numsemi
2238 semisize[2] /= numsemi
2238 for depth in snapsizedepth:
2239 for depth in snapsizedepth:
2239 snaptotal[depth] = snapsizedepth[depth][2]
2240 snaptotal[depth] = snapsizedepth[depth][2]
2240 snapsizedepth[depth][2] /= numsnapdepth[depth]
2241 snapsizedepth[depth][2] /= numsnapdepth[depth]
2241
2242
2242 deltatotal = deltasize[2]
2243 deltatotal = deltasize[2]
2243 if numdeltas > 0:
2244 if numdeltas > 0:
2244 deltasize[2] /= numdeltas
2245 deltasize[2] /= numdeltas
2245 totalsize = fulltotal + semitotal + deltatotal
2246 totalsize = fulltotal + semitotal + deltatotal
2246 avgchainlen = sum(chainlengths) / numrevs
2247 avgchainlen = sum(chainlengths) / numrevs
2247 maxchainlen = max(chainlengths)
2248 maxchainlen = max(chainlengths)
2248 maxchainspan = max(chainspans)
2249 maxchainspan = max(chainspans)
2249 compratio = 1
2250 compratio = 1
2250 if totalsize:
2251 if totalsize:
2251 compratio = totalrawsize / totalsize
2252 compratio = totalrawsize / totalsize
2252
2253
2253 basedfmtstr = '%%%dd\n'
2254 basedfmtstr = '%%%dd\n'
2254 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2255 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2255
2256
2256 def dfmtstr(max):
2257 def dfmtstr(max):
2257 return basedfmtstr % len(str(max))
2258 return basedfmtstr % len(str(max))
2258 def pcfmtstr(max, padding=0):
2259 def pcfmtstr(max, padding=0):
2259 return basepcfmtstr % (len(str(max)), ' ' * padding)
2260 return basepcfmtstr % (len(str(max)), ' ' * padding)
2260
2261
2261 def pcfmt(value, total):
2262 def pcfmt(value, total):
2262 if total:
2263 if total:
2263 return (value, 100 * float(value) / total)
2264 return (value, 100 * float(value) / total)
2264 else:
2265 else:
2265 return value, 100.0
2266 return value, 100.0
2266
2267
2267 ui.write(('format : %d\n') % format)
2268 ui.write(('format : %d\n') % format)
2268 ui.write(('flags : %s\n') % ', '.join(flags))
2269 ui.write(('flags : %s\n') % ', '.join(flags))
2269
2270
2270 ui.write('\n')
2271 ui.write('\n')
2271 fmt = pcfmtstr(totalsize)
2272 fmt = pcfmtstr(totalsize)
2272 fmt2 = dfmtstr(totalsize)
2273 fmt2 = dfmtstr(totalsize)
2273 ui.write(('revisions : ') + fmt2 % numrevs)
2274 ui.write(('revisions : ') + fmt2 % numrevs)
2274 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2275 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2275 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2276 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2276 ui.write(('revisions : ') + fmt2 % numrevs)
2277 ui.write(('revisions : ') + fmt2 % numrevs)
2277 ui.write((' empty : ') + fmt % pcfmt(numempty, numrevs))
2278 ui.write((' empty : ') + fmt % pcfmt(numempty, numrevs))
2278 ui.write((' text : ')
2279 ui.write((' text : ')
2279 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta))
2280 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta))
2280 ui.write((' delta : ')
2281 ui.write((' delta : ')
2281 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta))
2282 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta))
2282 ui.write((' snapshot : ') + fmt % pcfmt(numfull + numsemi, numrevs))
2283 ui.write((' snapshot : ') + fmt % pcfmt(numfull + numsemi, numrevs))
2283 for depth in sorted(numsnapdepth):
2284 for depth in sorted(numsnapdepth):
2284 ui.write((' lvl-%-3d : ' % depth)
2285 ui.write((' lvl-%-3d : ' % depth)
2285 + fmt % pcfmt(numsnapdepth[depth], numrevs))
2286 + fmt % pcfmt(numsnapdepth[depth], numrevs))
2286 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2287 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2287 ui.write(('revision size : ') + fmt2 % totalsize)
2288 ui.write(('revision size : ') + fmt2 % totalsize)
2288 ui.write((' snapshot : ')
2289 ui.write((' snapshot : ')
2289 + fmt % pcfmt(fulltotal + semitotal, totalsize))
2290 + fmt % pcfmt(fulltotal + semitotal, totalsize))
2290 for depth in sorted(numsnapdepth):
2291 for depth in sorted(numsnapdepth):
2291 ui.write((' lvl-%-3d : ' % depth)
2292 ui.write((' lvl-%-3d : ' % depth)
2292 + fmt % pcfmt(snaptotal[depth], totalsize))
2293 + fmt % pcfmt(snaptotal[depth], totalsize))
2293 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2294 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2294
2295
2295 def fmtchunktype(chunktype):
2296 def fmtchunktype(chunktype):
2296 if chunktype == 'empty':
2297 if chunktype == 'empty':
2297 return ' %s : ' % chunktype
2298 return ' %s : ' % chunktype
2298 elif chunktype in pycompat.bytestr(string.ascii_letters):
2299 elif chunktype in pycompat.bytestr(string.ascii_letters):
2299 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2300 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2300 else:
2301 else:
2301 return ' 0x%s : ' % hex(chunktype)
2302 return ' 0x%s : ' % hex(chunktype)
2302
2303
2303 ui.write('\n')
2304 ui.write('\n')
2304 ui.write(('chunks : ') + fmt2 % numrevs)
2305 ui.write(('chunks : ') + fmt2 % numrevs)
2305 for chunktype in sorted(chunktypecounts):
2306 for chunktype in sorted(chunktypecounts):
2306 ui.write(fmtchunktype(chunktype))
2307 ui.write(fmtchunktype(chunktype))
2307 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2308 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2308 ui.write(('chunks size : ') + fmt2 % totalsize)
2309 ui.write(('chunks size : ') + fmt2 % totalsize)
2309 for chunktype in sorted(chunktypecounts):
2310 for chunktype in sorted(chunktypecounts):
2310 ui.write(fmtchunktype(chunktype))
2311 ui.write(fmtchunktype(chunktype))
2311 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2312 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2312
2313
2313 ui.write('\n')
2314 ui.write('\n')
2314 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2315 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2315 ui.write(('avg chain length : ') + fmt % avgchainlen)
2316 ui.write(('avg chain length : ') + fmt % avgchainlen)
2316 ui.write(('max chain length : ') + fmt % maxchainlen)
2317 ui.write(('max chain length : ') + fmt % maxchainlen)
2317 ui.write(('max chain reach : ') + fmt % maxchainspan)
2318 ui.write(('max chain reach : ') + fmt % maxchainspan)
2318 ui.write(('compression ratio : ') + fmt % compratio)
2319 ui.write(('compression ratio : ') + fmt % compratio)
2319
2320
2320 if format > 0:
2321 if format > 0:
2321 ui.write('\n')
2322 ui.write('\n')
2322 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2323 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2323 % tuple(datasize))
2324 % tuple(datasize))
2324 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2325 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2325 % tuple(fullsize))
2326 % tuple(fullsize))
2326 ui.write(('inter-snapshot size (min/max/avg) : %d / %d / %d\n')
2327 ui.write(('inter-snapshot size (min/max/avg) : %d / %d / %d\n')
2327 % tuple(semisize))
2328 % tuple(semisize))
2328 for depth in sorted(snapsizedepth):
2329 for depth in sorted(snapsizedepth):
2329 if depth == 0:
2330 if depth == 0:
2330 continue
2331 continue
2331 ui.write((' level-%-3d (min/max/avg) : %d / %d / %d\n')
2332 ui.write((' level-%-3d (min/max/avg) : %d / %d / %d\n')
2332 % ((depth,) + tuple(snapsizedepth[depth])))
2333 % ((depth,) + tuple(snapsizedepth[depth])))
2333 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2334 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2334 % tuple(deltasize))
2335 % tuple(deltasize))
2335
2336
2336 if numdeltas > 0:
2337 if numdeltas > 0:
2337 ui.write('\n')
2338 ui.write('\n')
2338 fmt = pcfmtstr(numdeltas)
2339 fmt = pcfmtstr(numdeltas)
2339 fmt2 = pcfmtstr(numdeltas, 4)
2340 fmt2 = pcfmtstr(numdeltas, 4)
2340 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2341 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2341 if numprev > 0:
2342 if numprev > 0:
2342 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2343 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2343 numprev))
2344 numprev))
2344 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2345 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2345 numprev))
2346 numprev))
2346 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2347 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2347 numprev))
2348 numprev))
2348 if gdelta:
2349 if gdelta:
2349 ui.write(('deltas against p1 : ')
2350 ui.write(('deltas against p1 : ')
2350 + fmt % pcfmt(nump1, numdeltas))
2351 + fmt % pcfmt(nump1, numdeltas))
2351 ui.write(('deltas against p2 : ')
2352 ui.write(('deltas against p2 : ')
2352 + fmt % pcfmt(nump2, numdeltas))
2353 + fmt % pcfmt(nump2, numdeltas))
2353 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2354 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2354 numdeltas))
2355 numdeltas))
2355
2356
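The dfmtstr/pcfmtstr/pcfmt helpers above build width-matched format strings so that every statistics line is aligned against the largest value. A standalone copy of the trick with sample numbers (the values are made up for illustration):

    import sys

    def dfmtstr(maxvalue):
        # e.g. len(str(12345)) == 5, so this yields '%5d\n'
        return '%%%dd\n' % len(str(maxvalue))

    def pcfmtstr(maxvalue, padding=0):
        return '%%%dd %s(%%5.2f%%%%)\n' % (len(str(maxvalue)), ' ' * padding)

    def pcfmt(value, total):
        return (value, 100 * float(value) / total) if total else (value, 100.0)

    total, merges = 12345, 789
    sys.stdout.write(dfmtstr(total) % total)                  # '12345'
    sys.stdout.write(pcfmtstr(total) % pcfmt(merges, total))  # '  789 ( 6.39%)'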
2356 @command('debugrevlogindex', cmdutil.debugrevlogopts +
2357 @command('debugrevlogindex', cmdutil.debugrevlogopts +
2357 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
2358 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
2358 _('[-f FORMAT] -c|-m|FILE'),
2359 _('[-f FORMAT] -c|-m|FILE'),
2359 optionalrepo=True)
2360 optionalrepo=True)
2360 def debugrevlogindex(ui, repo, file_=None, **opts):
2361 def debugrevlogindex(ui, repo, file_=None, **opts):
2361 """dump the contents of a revlog index"""
2362 """dump the contents of a revlog index"""
2362 opts = pycompat.byteskwargs(opts)
2363 opts = pycompat.byteskwargs(opts)
2363 r = cmdutil.openrevlog(repo, 'debugrevlogindex', file_, opts)
2364 r = cmdutil.openrevlog(repo, 'debugrevlogindex', file_, opts)
2364 format = opts.get('format', 0)
2365 format = opts.get('format', 0)
2365 if format not in (0, 1):
2366 if format not in (0, 1):
2366 raise error.Abort(_("unknown format %d") % format)
2367 raise error.Abort(_("unknown format %d") % format)
2367
2368
2368 if ui.debugflag:
2369 if ui.debugflag:
2369 shortfn = hex
2370 shortfn = hex
2370 else:
2371 else:
2371 shortfn = short
2372 shortfn = short
2372
2373
2373 # There might not be anything in r, so have a sane default
2374 # There might not be anything in r, so have a sane default
2374 idlen = 12
2375 idlen = 12
2375 for i in r:
2376 for i in r:
2376 idlen = len(shortfn(r.node(i)))
2377 idlen = len(shortfn(r.node(i)))
2377 break
2378 break
2378
2379
2379 if format == 0:
2380 if format == 0:
2380 if ui.verbose:
2381 if ui.verbose:
2381 ui.write((" rev offset length linkrev"
2382 ui.write((" rev offset length linkrev"
2382 " %s %s p2\n") % ("nodeid".ljust(idlen),
2383 " %s %s p2\n") % ("nodeid".ljust(idlen),
2383 "p1".ljust(idlen)))
2384 "p1".ljust(idlen)))
2384 else:
2385 else:
2385 ui.write((" rev linkrev %s %s p2\n") % (
2386 ui.write((" rev linkrev %s %s p2\n") % (
2386 "nodeid".ljust(idlen), "p1".ljust(idlen)))
2387 "nodeid".ljust(idlen), "p1".ljust(idlen)))
2387 elif format == 1:
2388 elif format == 1:
2388 if ui.verbose:
2389 if ui.verbose:
2389 ui.write((" rev flag offset length size link p1"
2390 ui.write((" rev flag offset length size link p1"
2390 " p2 %s\n") % "nodeid".rjust(idlen))
2391 " p2 %s\n") % "nodeid".rjust(idlen))
2391 else:
2392 else:
2392 ui.write((" rev flag size link p1 p2 %s\n") %
2393 ui.write((" rev flag size link p1 p2 %s\n") %
2393 "nodeid".rjust(idlen))
2394 "nodeid".rjust(idlen))
2394
2395
2395 for i in r:
2396 for i in r:
2396 node = r.node(i)
2397 node = r.node(i)
2397 if format == 0:
2398 if format == 0:
2398 try:
2399 try:
2399 pp = r.parents(node)
2400 pp = r.parents(node)
2400 except Exception:
2401 except Exception:
2401 pp = [nullid, nullid]
2402 pp = [nullid, nullid]
2402 if ui.verbose:
2403 if ui.verbose:
2403 ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
2404 ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
2404 i, r.start(i), r.length(i), r.linkrev(i),
2405 i, r.start(i), r.length(i), r.linkrev(i),
2405 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
2406 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
2406 else:
2407 else:
2407 ui.write("% 6d % 7d %s %s %s\n" % (
2408 ui.write("% 6d % 7d %s %s %s\n" % (
2408 i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
2409 i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
2409 shortfn(pp[1])))
2410 shortfn(pp[1])))
2410 elif format == 1:
2411 elif format == 1:
2411 pr = r.parentrevs(i)
2412 pr = r.parentrevs(i)
2412 if ui.verbose:
2413 if ui.verbose:
2413 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
2414 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
2414 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
2415 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
2415 r.linkrev(i), pr[0], pr[1], shortfn(node)))
2416 r.linkrev(i), pr[0], pr[1], shortfn(node)))
2416 else:
2417 else:
2417 ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
2418 ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
2418 i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
2419 i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
2419 shortfn(node)))
2420 shortfn(node)))
2420
2421
2421 @command('debugrevspec',
2422 @command('debugrevspec',
2422 [('', 'optimize', None,
2423 [('', 'optimize', None,
2423 _('print parsed tree after optimizing (DEPRECATED)')),
2424 _('print parsed tree after optimizing (DEPRECATED)')),
2424 ('', 'show-revs', True, _('print list of result revisions (default)')),
2425 ('', 'show-revs', True, _('print list of result revisions (default)')),
2425 ('s', 'show-set', None, _('print internal representation of result set')),
2426 ('s', 'show-set', None, _('print internal representation of result set')),
2426 ('p', 'show-stage', [],
2427 ('p', 'show-stage', [],
2427 _('print parsed tree at the given stage'), _('NAME')),
2428 _('print parsed tree at the given stage'), _('NAME')),
2428 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2429 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2429 ('', 'verify-optimized', False, _('verify optimized result')),
2430 ('', 'verify-optimized', False, _('verify optimized result')),
2430 ],
2431 ],
2431 ('REVSPEC'))
2432 ('REVSPEC'))
2432 def debugrevspec(ui, repo, expr, **opts):
2433 def debugrevspec(ui, repo, expr, **opts):
2433 """parse and apply a revision specification
2434 """parse and apply a revision specification
2434
2435
2435 Use the -p/--show-stage option to print the parsed tree at the given stages.
2436 Use the -p/--show-stage option to print the parsed tree at the given stages.
2436 Use -p all to print the tree at every stage.
2437 Use -p all to print the tree at every stage.
2437
2438
2438 Use the --no-show-revs option with -s or -p to print only the set
2439 Use the --no-show-revs option with -s or -p to print only the set
2439 representation or the parsed tree, respectively.
2440 representation or the parsed tree, respectively.
2440
2441
2441 Use --verify-optimized to compare the optimized result with the unoptimized
2442 Use --verify-optimized to compare the optimized result with the unoptimized
2442 one. Returns 1 if the optimized result differs.
2443 one. Returns 1 if the optimized result differs.
2443 """
2444 """
2444 opts = pycompat.byteskwargs(opts)
2445 opts = pycompat.byteskwargs(opts)
2445 aliases = ui.configitems('revsetalias')
2446 aliases = ui.configitems('revsetalias')
2446 stages = [
2447 stages = [
2447 ('parsed', lambda tree: tree),
2448 ('parsed', lambda tree: tree),
2448 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2449 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2449 ui.warn)),
2450 ui.warn)),
2450 ('concatenated', revsetlang.foldconcat),
2451 ('concatenated', revsetlang.foldconcat),
2451 ('analyzed', revsetlang.analyze),
2452 ('analyzed', revsetlang.analyze),
2452 ('optimized', revsetlang.optimize),
2453 ('optimized', revsetlang.optimize),
2453 ]
2454 ]
2454 if opts['no_optimized']:
2455 if opts['no_optimized']:
2455 stages = stages[:-1]
2456 stages = stages[:-1]
2456 if opts['verify_optimized'] and opts['no_optimized']:
2457 if opts['verify_optimized'] and opts['no_optimized']:
2457 raise error.Abort(_('cannot use --verify-optimized with '
2458 raise error.Abort(_('cannot use --verify-optimized with '
2458 '--no-optimized'))
2459 '--no-optimized'))
2459 stagenames = set(n for n, f in stages)
2460 stagenames = set(n for n, f in stages)
2460
2461
2461 showalways = set()
2462 showalways = set()
2462 showchanged = set()
2463 showchanged = set()
2463 if ui.verbose and not opts['show_stage']:
2464 if ui.verbose and not opts['show_stage']:
2464 # show parsed tree by --verbose (deprecated)
2465 # show parsed tree by --verbose (deprecated)
2465 showalways.add('parsed')
2466 showalways.add('parsed')
2466 showchanged.update(['expanded', 'concatenated'])
2467 showchanged.update(['expanded', 'concatenated'])
2467 if opts['optimize']:
2468 if opts['optimize']:
2468 showalways.add('optimized')
2469 showalways.add('optimized')
2469 if opts['show_stage'] and opts['optimize']:
2470 if opts['show_stage'] and opts['optimize']:
2470 raise error.Abort(_('cannot use --optimize with --show-stage'))
2471 raise error.Abort(_('cannot use --optimize with --show-stage'))
2471 if opts['show_stage'] == ['all']:
2472 if opts['show_stage'] == ['all']:
2472 showalways.update(stagenames)
2473 showalways.update(stagenames)
2473 else:
2474 else:
2474 for n in opts['show_stage']:
2475 for n in opts['show_stage']:
2475 if n not in stagenames:
2476 if n not in stagenames:
2476 raise error.Abort(_('invalid stage name: %s') % n)
2477 raise error.Abort(_('invalid stage name: %s') % n)
2477 showalways.update(opts['show_stage'])
2478 showalways.update(opts['show_stage'])
2478
2479
2479 treebystage = {}
2480 treebystage = {}
2480 printedtree = None
2481 printedtree = None
2481 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
2482 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
2482 for n, f in stages:
2483 for n, f in stages:
2483 treebystage[n] = tree = f(tree)
2484 treebystage[n] = tree = f(tree)
2484 if n in showalways or (n in showchanged and tree != printedtree):
2485 if n in showalways or (n in showchanged and tree != printedtree):
2485 if opts['show_stage'] or n != 'parsed':
2486 if opts['show_stage'] or n != 'parsed':
2486 ui.write(("* %s:\n") % n)
2487 ui.write(("* %s:\n") % n)
2487 ui.write(revsetlang.prettyformat(tree), "\n")
2488 ui.write(revsetlang.prettyformat(tree), "\n")
2488 printedtree = tree
2489 printedtree = tree
2489
2490
2490 if opts['verify_optimized']:
2491 if opts['verify_optimized']:
2491 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2492 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2492 brevs = revset.makematcher(treebystage['optimized'])(repo)
2493 brevs = revset.makematcher(treebystage['optimized'])(repo)
2493 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2494 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2494 ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
2495 ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
2495 ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
2496 ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
2496 arevs = list(arevs)
2497 arevs = list(arevs)
2497 brevs = list(brevs)
2498 brevs = list(brevs)
2498 if arevs == brevs:
2499 if arevs == brevs:
2499 return 0
2500 return 0
2500 ui.write(('--- analyzed\n'), label='diff.file_a')
2501 ui.write(('--- analyzed\n'), label='diff.file_a')
2501 ui.write(('+++ optimized\n'), label='diff.file_b')
2502 ui.write(('+++ optimized\n'), label='diff.file_b')
2502 sm = difflib.SequenceMatcher(None, arevs, brevs)
2503 sm = difflib.SequenceMatcher(None, arevs, brevs)
2503 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2504 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2504 if tag in (r'delete', r'replace'):
2505 if tag in (r'delete', r'replace'):
2505 for c in arevs[alo:ahi]:
2506 for c in arevs[alo:ahi]:
2506 ui.write('-%d\n' % c, label='diff.deleted')
2507 ui.write('-%d\n' % c, label='diff.deleted')
2507 if tag in (r'insert', r'replace'):
2508 if tag in (r'insert', r'replace'):
2508 for c in brevs[blo:bhi]:
2509 for c in brevs[blo:bhi]:
2509 ui.write('+%d\n' % c, label='diff.inserted')
2510 ui.write('+%d\n' % c, label='diff.inserted')
2510 if tag == r'equal':
2511 if tag == r'equal':
2511 for c in arevs[alo:ahi]:
2512 for c in arevs[alo:ahi]:
2512 ui.write(' %d\n' % c)
2513 ui.write(' %d\n' % c)
2513 return 1
2514 return 1
2514
2515
2515 func = revset.makematcher(tree)
2516 func = revset.makematcher(tree)
2516 revs = func(repo)
2517 revs = func(repo)
2517 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2518 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2518 ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
2519 ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
2519 if not opts['show_revs']:
2520 if not opts['show_revs']:
2520 return
2521 return
2521 for c in revs:
2522 for c in revs:
2522 ui.write("%d\n" % c)
2523 ui.write("%d\n" % c)
2523
2524
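debugrevspec threads the expression through an ordered list of (name, transform) stages, recording each intermediate tree and printing the ones requested with -p/--show-stage. A generic sketch of that staged-pipeline pattern, with toy transforms standing in for the revsetlang functions:

    stages = [
        ('parsed',    lambda tree: tree),
        ('expanded',  lambda tree: tree.lower()),   # stand-in for expandaliases/foldconcat
        ('optimized', lambda tree: tree.strip()),   # stand-in for revsetlang.optimize
    ]
    show = {'parsed', 'optimized'}                  # like -p parsed -p optimized

    tree = '  HEADS() AND PUBLIC()  '
    treebystage = {}
    for name, transform in stages:
        treebystage[name] = tree = transform(tree)
        if name in show:
            print('* %s:\n%r' % (name, tree))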
2524 @command('debugserve', [
2525 @command('debugserve', [
2525 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2526 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2526 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2527 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2527 ('', 'logiofile', '', _('file to log server I/O to')),
2528 ('', 'logiofile', '', _('file to log server I/O to')),
2528 ], '')
2529 ], '')
2529 def debugserve(ui, repo, **opts):
2530 def debugserve(ui, repo, **opts):
2530 """run a server with advanced settings
2531 """run a server with advanced settings
2531
2532
2532 This command is similar to :hg:`serve`. It exists partially as a
2533 This command is similar to :hg:`serve`. It exists partially as a
2533 workaround for the fact that ``hg serve --stdio`` must have specific
2534 workaround for the fact that ``hg serve --stdio`` must have specific
2534 arguments for security reasons.
2535 arguments for security reasons.
2535 """
2536 """
2536 opts = pycompat.byteskwargs(opts)
2537 opts = pycompat.byteskwargs(opts)
2537
2538
2538 if not opts['sshstdio']:
2539 if not opts['sshstdio']:
2539 raise error.Abort(_('only --sshstdio is currently supported'))
2540 raise error.Abort(_('only --sshstdio is currently supported'))
2540
2541
2541 logfh = None
2542 logfh = None
2542
2543
2543 if opts['logiofd'] and opts['logiofile']:
2544 if opts['logiofd'] and opts['logiofile']:
2544 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2545 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2545
2546
2546 if opts['logiofd']:
2547 if opts['logiofd']:
2547 # Line buffered because output is line based.
2548 # Line buffered because output is line based.
2548 try:
2549 try:
2549 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2550 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2550 except OSError as e:
2551 except OSError as e:
2551 if e.errno != errno.ESPIPE:
2552 if e.errno != errno.ESPIPE:
2552 raise
2553 raise
2553 # can't seek a pipe, so `ab` mode fails on py3
2554 # can't seek a pipe, so `ab` mode fails on py3
2554 logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
2555 logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
2555 elif opts['logiofile']:
2556 elif opts['logiofile']:
2556 logfh = open(opts['logiofile'], 'ab', 1)
2557 logfh = open(opts['logiofile'], 'ab', 1)
2557
2558
2558 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2559 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2559 s.serve_forever()
2560 s.serve_forever()
2560
2561
2561 @command('debugsetparents', [], _('REV1 [REV2]'))
2562 @command('debugsetparents', [], _('REV1 [REV2]'))
2562 def debugsetparents(ui, repo, rev1, rev2=None):
2563 def debugsetparents(ui, repo, rev1, rev2=None):
2563 """manually set the parents of the current working directory
2564 """manually set the parents of the current working directory
2564
2565
2565 This is useful for writing repository conversion tools, but should
2566 This is useful for writing repository conversion tools, but should
2566 be used with care. For example, neither the working directory nor the
2567 be used with care. For example, neither the working directory nor the
2567 dirstate is updated, so file status may be incorrect after running this
2568 dirstate is updated, so file status may be incorrect after running this
2568 command.
2569 command.
2569
2570
2570 Returns 0 on success.
2571 Returns 0 on success.
2571 """
2572 """
2572
2573
2573 node1 = scmutil.revsingle(repo, rev1).node()
2574 node1 = scmutil.revsingle(repo, rev1).node()
2574 node2 = scmutil.revsingle(repo, rev2, 'null').node()
2575 node2 = scmutil.revsingle(repo, rev2, 'null').node()
2575
2576
2576 with repo.wlock():
2577 with repo.wlock():
2577 repo.setparents(node1, node2)
2578 repo.setparents(node1, node2)
2578
2579
2579 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2580 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2580 def debugssl(ui, repo, source=None, **opts):
2581 def debugssl(ui, repo, source=None, **opts):
2581 '''test a secure connection to a server
2582 '''test a secure connection to a server
2582
2583
2583 This builds the certificate chain for the server on Windows, installing the
2584 This builds the certificate chain for the server on Windows, installing the
2584 missing intermediates and trusted root via Windows Update if necessary. It
2585 missing intermediates and trusted root via Windows Update if necessary. It
2585 does nothing on other platforms.
2586 does nothing on other platforms.
2586
2587
2587 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2588 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2588 that server is used. See :hg:`help urls` for more information.
2589 that server is used. See :hg:`help urls` for more information.
2589
2590
2590 If the update succeeds, retry the original operation. Otherwise, the cause
2591 If the update succeeds, retry the original operation. Otherwise, the cause
2591 of the SSL error is likely another issue.
2592 of the SSL error is likely another issue.
2592 '''
2593 '''
2593 if not pycompat.iswindows:
2594 if not pycompat.iswindows:
2594 raise error.Abort(_('certificate chain building is only possible on '
2595 raise error.Abort(_('certificate chain building is only possible on '
2595 'Windows'))
2596 'Windows'))
2596
2597
2597 if not source:
2598 if not source:
2598 if not repo:
2599 if not repo:
2599 raise error.Abort(_("there is no Mercurial repository here, and no "
2600 raise error.Abort(_("there is no Mercurial repository here, and no "
2600 "server specified"))
2601 "server specified"))
2601 source = "default"
2602 source = "default"
2602
2603
2603 source, branches = hg.parseurl(ui.expandpath(source))
2604 source, branches = hg.parseurl(ui.expandpath(source))
2604 url = util.url(source)
2605 url = util.url(source)
2605
2606
2606 defaultport = {'https': 443, 'ssh': 22}
2607 defaultport = {'https': 443, 'ssh': 22}
2607 if url.scheme in defaultport:
2608 if url.scheme in defaultport:
2608 try:
2609 try:
2609 addr = (url.host, int(url.port or defaultport[url.scheme]))
2610 addr = (url.host, int(url.port or defaultport[url.scheme]))
2610 except ValueError:
2611 except ValueError:
2611 raise error.Abort(_("malformed port number in URL"))
2612 raise error.Abort(_("malformed port number in URL"))
2612 else:
2613 else:
2613 raise error.Abort(_("only https and ssh connections are supported"))
2614 raise error.Abort(_("only https and ssh connections are supported"))
2614
2615
2615 from . import win32
2616 from . import win32
2616
2617
2617 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2618 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2618 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2619 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2619
2620
2620 try:
2621 try:
2621 s.connect(addr)
2622 s.connect(addr)
2622 cert = s.getpeercert(True)
2623 cert = s.getpeercert(True)
2623
2624
2624 ui.status(_('checking the certificate chain for %s\n') % url.host)
2625 ui.status(_('checking the certificate chain for %s\n') % url.host)
2625
2626
2626 complete = win32.checkcertificatechain(cert, build=False)
2627 complete = win32.checkcertificatechain(cert, build=False)
2627
2628
2628 if not complete:
2629 if not complete:
2629 ui.status(_('certificate chain is incomplete, updating... '))
2630 ui.status(_('certificate chain is incomplete, updating... '))
2630
2631
2631 if not win32.checkcertificatechain(cert):
2632 if not win32.checkcertificatechain(cert):
2632 ui.status(_('failed.\n'))
2633 ui.status(_('failed.\n'))
2633 else:
2634 else:
2634 ui.status(_('done.\n'))
2635 ui.status(_('done.\n'))
2635 else:
2636 else:
2636 ui.status(_('full certificate chain is available\n'))
2637 ui.status(_('full certificate chain is available\n'))
2637 finally:
2638 finally:
2638 s.close()
2639 s.close()
2639
2640
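# A minimal sketch of the certificate fetch performed above, reusing the same
# ssl/socket calls; the helper name _examplefetchpeercert and its defaults are
# hypothetical, and a reachable HTTPS server is assumed.
def _examplefetchpeercert(host='example.com', port=443):
    s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
                        cert_reqs=ssl.CERT_NONE, ca_certs=None)
    try:
        # Connect without verification so the raw DER certificate can be read
        # and handed to the platform chain-building check afterwards.
        s.connect((host, port))
        return s.getpeercert(True)
    finally:
        s.close()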
2640 @command('debugsub',
2641 @command('debugsub',
2641 [('r', 'rev', '',
2642 [('r', 'rev', '',
2642 _('revision to check'), _('REV'))],
2643 _('revision to check'), _('REV'))],
2643 _('[-r REV] [REV]'))
2644 _('[-r REV] [REV]'))
2644 def debugsub(ui, repo, rev=None):
2645 def debugsub(ui, repo, rev=None):
2645 ctx = scmutil.revsingle(repo, rev, None)
2646 ctx = scmutil.revsingle(repo, rev, None)
2646 for k, v in sorted(ctx.substate.items()):
2647 for k, v in sorted(ctx.substate.items()):
2647 ui.write(('path %s\n') % k)
2648 ui.write(('path %s\n') % k)
2648 ui.write((' source %s\n') % v[0])
2649 ui.write((' source %s\n') % v[0])
2649 ui.write((' revision %s\n') % v[1])
2650 ui.write((' revision %s\n') % v[1])
2650
2651
2651 @command('debugsuccessorssets',
2652 @command('debugsuccessorssets',
2652 [('', 'closest', False, _('return closest successors sets only'))],
2653 [('', 'closest', False, _('return closest successors sets only'))],
2653 _('[REV]'))
2654 _('[REV]'))
2654 def debugsuccessorssets(ui, repo, *revs, **opts):
2655 def debugsuccessorssets(ui, repo, *revs, **opts):
2655 """show set of successors for revision
2656 """show set of successors for revision
2656
2657
2657 A successors set of changeset A is a consistent group of revisions that
2658 A successors set of changeset A is a consistent group of revisions that
2658 succeed A. It contains non-obsolete changesets only unless the closest
2659 succeed A. It contains non-obsolete changesets only unless the closest
2659 successors sets are requested (--closest).
2660 successors sets are requested (--closest).
2660
2661
2661 In most cases a changeset A has a single successors set containing a single
2662 In most cases a changeset A has a single successors set containing a single
2662 successor (changeset A replaced by A').
2663 successor (changeset A replaced by A').
2663
2664
2664 A changeset that is made obsolete with no successors is called "pruned".
2665 A changeset that is made obsolete with no successors is called "pruned".
2665 Such changesets have no successors sets at all.
2666 Such changesets have no successors sets at all.
2666
2667
2667 A changeset that has been "split" will have a successors set containing
2668 A changeset that has been "split" will have a successors set containing
2668 more than one successor.
2669 more than one successor.
2669
2670
2670 A changeset that has been rewritten in multiple different ways is called
2671 A changeset that has been rewritten in multiple different ways is called
2671 "divergent". Such changesets have multiple successor sets (each of which
2672 "divergent". Such changesets have multiple successor sets (each of which
2672 may also be split, i.e. have multiple successors).
2673 may also be split, i.e. have multiple successors).
2673
2674
2674 Results are displayed as follows::
2675 Results are displayed as follows::
2675
2676
2676 <rev1>
2677 <rev1>
2677 <successors-1A>
2678 <successors-1A>
2678 <rev2>
2679 <rev2>
2679 <successors-2A>
2680 <successors-2A>
2680 <successors-2B1> <successors-2B2> <successors-2B3>
2681 <successors-2B1> <successors-2B2> <successors-2B3>
2681
2682
2682 Here rev2 has two possible (i.e. divergent) successors sets. The first
2683 Here rev2 has two possible (i.e. divergent) successors sets. The first
2683 holds one element, whereas the second holds three (i.e. the changeset has
2684 holds one element, whereas the second holds three (i.e. the changeset has
2684 been split).
2685 been split).
2685 """
2686 """
2686 # passed to successorssets caching computation from one call to another
2687 # passed to successorssets caching computation from one call to another
2687 cache = {}
2688 cache = {}
2688 ctx2str = bytes
2689 ctx2str = bytes
2689 node2str = short
2690 node2str = short
2690 for rev in scmutil.revrange(repo, revs):
2691 for rev in scmutil.revrange(repo, revs):
2691 ctx = repo[rev]
2692 ctx = repo[rev]
2692 ui.write('%s\n'% ctx2str(ctx))
2693 ui.write('%s\n'% ctx2str(ctx))
2693 for succsset in obsutil.successorssets(repo, ctx.node(),
2694 for succsset in obsutil.successorssets(repo, ctx.node(),
2694 closest=opts[r'closest'],
2695 closest=opts[r'closest'],
2695 cache=cache):
2696 cache=cache):
2696 if succsset:
2697 if succsset:
2697 ui.write(' ')
2698 ui.write(' ')
2698 ui.write(node2str(succsset[0]))
2699 ui.write(node2str(succsset[0]))
2699 for node in succsset[1:]:
2700 for node in succsset[1:]:
2700 ui.write(' ')
2701 ui.write(' ')
2701 ui.write(node2str(node))
2702 ui.write(node2str(node))
2702 ui.write('\n')
2703 ui.write('\n')
2703
2704
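# A small sketch of the underlying API the loop above drives; the helper name
# _examplesuccessorssets is hypothetical, and repo is assumed to be an
# already-constructed local repository object.
def _examplesuccessorssets(repo, rev):
    cache = {}  # shared across calls, exactly as the command above does
    ctx = repo[rev]
    # Each returned element is one successors set: a list of successor nodes.
    return obsutil.successorssets(repo, ctx.node(), closest=True, cache=cache)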
2704 @command('debugtemplate',
2705 @command('debugtemplate',
2705 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2706 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2706 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2707 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2707 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2708 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2708 optionalrepo=True)
2709 optionalrepo=True)
2709 def debugtemplate(ui, repo, tmpl, **opts):
2710 def debugtemplate(ui, repo, tmpl, **opts):
2710 """parse and apply a template
2711 """parse and apply a template
2711
2712
2712 If -r/--rev is given, the template is processed as a log template and
2713 If -r/--rev is given, the template is processed as a log template and
2713 applied to the given changesets. Otherwise, it is processed as a generic
2714 applied to the given changesets. Otherwise, it is processed as a generic
2714 template.
2715 template.
2715
2716
2716 Use --verbose to print the parsed tree.
2717 Use --verbose to print the parsed tree.
2717 """
2718 """
2718 revs = None
2719 revs = None
2719 if opts[r'rev']:
2720 if opts[r'rev']:
2720 if repo is None:
2721 if repo is None:
2721 raise error.RepoError(_('there is no Mercurial repository here '
2722 raise error.RepoError(_('there is no Mercurial repository here '
2722 '(.hg not found)'))
2723 '(.hg not found)'))
2723 revs = scmutil.revrange(repo, opts[r'rev'])
2724 revs = scmutil.revrange(repo, opts[r'rev'])
2724
2725
2725 props = {}
2726 props = {}
2726 for d in opts[r'define']:
2727 for d in opts[r'define']:
2727 try:
2728 try:
2728 k, v = (e.strip() for e in d.split('=', 1))
2729 k, v = (e.strip() for e in d.split('=', 1))
2729 if not k or k == 'ui':
2730 if not k or k == 'ui':
2730 raise ValueError
2731 raise ValueError
2731 props[k] = v
2732 props[k] = v
2732 except ValueError:
2733 except ValueError:
2733 raise error.Abort(_('malformed keyword definition: %s') % d)
2734 raise error.Abort(_('malformed keyword definition: %s') % d)
2734
2735
2735 if ui.verbose:
2736 if ui.verbose:
2736 aliases = ui.configitems('templatealias')
2737 aliases = ui.configitems('templatealias')
2737 tree = templater.parse(tmpl)
2738 tree = templater.parse(tmpl)
2738 ui.note(templater.prettyformat(tree), '\n')
2739 ui.note(templater.prettyformat(tree), '\n')
2739 newtree = templater.expandaliases(tree, aliases)
2740 newtree = templater.expandaliases(tree, aliases)
2740 if newtree != tree:
2741 if newtree != tree:
2741 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2742 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2742
2743
2743 if revs is None:
2744 if revs is None:
2744 tres = formatter.templateresources(ui, repo)
2745 tres = formatter.templateresources(ui, repo)
2745 t = formatter.maketemplater(ui, tmpl, resources=tres)
2746 t = formatter.maketemplater(ui, tmpl, resources=tres)
2746 if ui.verbose:
2747 if ui.verbose:
2747 kwds, funcs = t.symbolsuseddefault()
2748 kwds, funcs = t.symbolsuseddefault()
2748 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2749 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2749 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2750 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2750 ui.write(t.renderdefault(props))
2751 ui.write(t.renderdefault(props))
2751 else:
2752 else:
2752 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2753 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2753 if ui.verbose:
2754 if ui.verbose:
2754 kwds, funcs = displayer.t.symbolsuseddefault()
2755 kwds, funcs = displayer.t.symbolsuseddefault()
2755 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2756 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2756 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2757 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2757 for r in revs:
2758 for r in revs:
2758 displayer.show(repo[r], **pycompat.strkwargs(props))
2759 displayer.show(repo[r], **pycompat.strkwargs(props))
2759 displayer.close()
2760 displayer.close()
2760
2761
2761 @command('debuguigetpass', [
2762 @command('debuguigetpass', [
2762 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2763 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2763 ], _('[-p TEXT]'), norepo=True)
2764 ], _('[-p TEXT]'), norepo=True)
2764 def debuguigetpass(ui, prompt=''):
2765 def debuguigetpass(ui, prompt=''):
2765 """show prompt to type password"""
2766 """show prompt to type password"""
2766 r = ui.getpass(prompt)
2767 r = ui.getpass(prompt)
2767 ui.write(('response: %s\n') % r)
2768 ui.write(('response: %s\n') % r)
2768
2769
2769 @command('debuguiprompt', [
2770 @command('debuguiprompt', [
2770 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2771 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2771 ], _('[-p TEXT]'), norepo=True)
2772 ], _('[-p TEXT]'), norepo=True)
2772 def debuguiprompt(ui, prompt=''):
2773 def debuguiprompt(ui, prompt=''):
2773 """show plain prompt"""
2774 """show plain prompt"""
2774 r = ui.prompt(prompt)
2775 r = ui.prompt(prompt)
2775 ui.write(('response: %s\n') % r)
2776 ui.write(('response: %s\n') % r)
2776
2777
2777 @command('debugupdatecaches', [])
2778 @command('debugupdatecaches', [])
2778 def debugupdatecaches(ui, repo, *pats, **opts):
2779 def debugupdatecaches(ui, repo, *pats, **opts):
2779 """warm all known caches in the repository"""
2780 """warm all known caches in the repository"""
2780 with repo.wlock(), repo.lock():
2781 with repo.wlock(), repo.lock():
2781 repo.updatecaches(full=True)
2782 repo.updatecaches(full=True)
2782
2783
2783 @command('debugupgraderepo', [
2784 @command('debugupgraderepo', [
2784 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2785 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2785 ('', 'run', False, _('performs an upgrade')),
2786 ('', 'run', False, _('performs an upgrade')),
2786 ('', 'backup', True, _('keep the old repository content around')),
2787 ('', 'backup', True, _('keep the old repository content around')),
2787 ])
2788 ])
2788 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True):
2789 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True):
2789 """upgrade a repository to use different features
2790 """upgrade a repository to use different features
2790
2791
2791 If no arguments are specified, the repository is evaluated for upgrade
2792 If no arguments are specified, the repository is evaluated for upgrade
2792 and a list of problems and potential optimizations is printed.
2793 and a list of problems and potential optimizations is printed.
2793
2794
2794 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2795 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2795 can be influenced via additional arguments. More details will be provided
2796 can be influenced via additional arguments. More details will be provided
2796 by the command output when run without ``--run``.
2797 by the command output when run without ``--run``.
2797
2798
2798 During the upgrade, the repository will be locked and no writes will be
2799 During the upgrade, the repository will be locked and no writes will be
2799 allowed.
2800 allowed.
2800
2801
2801 At the end of the upgrade, the repository may not be readable while new
2802 At the end of the upgrade, the repository may not be readable while new
2802 repository data is swapped in. This window will be as long as it takes to
2803 repository data is swapped in. This window will be as long as it takes to
2803 rename some directories inside the ``.hg`` directory. On most machines, this
2804 rename some directories inside the ``.hg`` directory. On most machines, this
2804 should complete almost instantaneously and the chances of a consumer being
2805 should complete almost instantaneously and the chances of a consumer being
2805 unable to access the repository should be low.
2806 unable to access the repository should be low.
2806 """
2807 """
2807 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize,
2808 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize,
2808 backup=backup)
2809 backup=backup)
2809
2810
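# A hedged sketch of calling the same entry point directly: run once without
# run=True to print the analysis, then again with run=True to perform it. The
# helper name _exampleupgrade is hypothetical; `names` would hold optimization
# names taken from that first report.
def _exampleupgrade(ui, repo, names=()):
    upgrade.upgraderepo(ui, repo, run=False)     # report problems/optimizations
    return upgrade.upgraderepo(ui, repo, run=True,
                               optimize=list(names), backup=True)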
2810 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2811 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2811 inferrepo=True)
2812 inferrepo=True)
2812 def debugwalk(ui, repo, *pats, **opts):
2813 def debugwalk(ui, repo, *pats, **opts):
2813 """show how files match on given patterns"""
2814 """show how files match on given patterns"""
2814 opts = pycompat.byteskwargs(opts)
2815 opts = pycompat.byteskwargs(opts)
2815 m = scmutil.match(repo[None], pats, opts)
2816 m = scmutil.match(repo[None], pats, opts)
2816 if ui.verbose:
2817 if ui.verbose:
2817 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
2818 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
2818 items = list(repo[None].walk(m))
2819 items = list(repo[None].walk(m))
2819 if not items:
2820 if not items:
2820 return
2821 return
2821 f = lambda fn: fn
2822 f = lambda fn: fn
2822 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2823 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2823 f = lambda fn: util.normpath(fn)
2824 f = lambda fn: util.normpath(fn)
2824 fmt = 'f %%-%ds %%-%ds %%s' % (
2825 fmt = 'f %%-%ds %%-%ds %%s' % (
2825 max([len(abs) for abs in items]),
2826 max([len(abs) for abs in items]),
2826 max([len(repo.pathto(abs)) for abs in items]))
2827 max([len(repo.pathto(abs)) for abs in items]))
2827 for abs in items:
2828 for abs in items:
2828 line = fmt % (abs, f(repo.pathto(abs)), m.exact(abs) and 'exact' or '')
2829 line = fmt % (abs, f(repo.pathto(abs)), m.exact(abs) and 'exact' or '')
2829 ui.write("%s\n" % line.rstrip())
2830 ui.write("%s\n" % line.rstrip())
2830
2831
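# A short sketch of the matcher/walk pair exercised above: build a matcher for
# some patterns and list the working-copy files it selects, flagging exact
# (non-pattern) matches. The helper name _examplewalk is hypothetical.
def _examplewalk(repo, pats):
    m = scmutil.match(repo[None], pats, {})
    return [(f, m.exact(f)) for f in repo[None].walk(m)]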
2831 @command('debugwhyunstable', [], _('REV'))
2832 @command('debugwhyunstable', [], _('REV'))
2832 def debugwhyunstable(ui, repo, rev):
2833 def debugwhyunstable(ui, repo, rev):
2833 """explain instabilities of a changeset"""
2834 """explain instabilities of a changeset"""
2834 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
2835 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
2835 dnodes = ''
2836 dnodes = ''
2836 if entry.get('divergentnodes'):
2837 if entry.get('divergentnodes'):
2837 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2838 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2838 for ctx in entry['divergentnodes']) + ' '
2839 for ctx in entry['divergentnodes']) + ' '
2839 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2840 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2840 entry['reason'], entry['node']))
2841 entry['reason'], entry['node']))
2841
2842
2842 @command('debugwireargs',
2843 @command('debugwireargs',
2843 [('', 'three', '', 'three'),
2844 [('', 'three', '', 'three'),
2844 ('', 'four', '', 'four'),
2845 ('', 'four', '', 'four'),
2845 ('', 'five', '', 'five'),
2846 ('', 'five', '', 'five'),
2846 ] + cmdutil.remoteopts,
2847 ] + cmdutil.remoteopts,
2847 _('REPO [OPTIONS]... [ONE [TWO]]'),
2848 _('REPO [OPTIONS]... [ONE [TWO]]'),
2848 norepo=True)
2849 norepo=True)
2849 def debugwireargs(ui, repopath, *vals, **opts):
2850 def debugwireargs(ui, repopath, *vals, **opts):
2850 opts = pycompat.byteskwargs(opts)
2851 opts = pycompat.byteskwargs(opts)
2851 repo = hg.peer(ui, opts, repopath)
2852 repo = hg.peer(ui, opts, repopath)
2852 for opt in cmdutil.remoteopts:
2853 for opt in cmdutil.remoteopts:
2853 del opts[opt[1]]
2854 del opts[opt[1]]
2854 args = {}
2855 args = {}
2855 for k, v in opts.iteritems():
2856 for k, v in opts.iteritems():
2856 if v:
2857 if v:
2857 args[k] = v
2858 args[k] = v
2858 args = pycompat.strkwargs(args)
2859 args = pycompat.strkwargs(args)
2859 # run twice to check that we don't mess up the stream for the next command
2860 # run twice to check that we don't mess up the stream for the next command
2860 res1 = repo.debugwireargs(*vals, **args)
2861 res1 = repo.debugwireargs(*vals, **args)
2861 res2 = repo.debugwireargs(*vals, **args)
2862 res2 = repo.debugwireargs(*vals, **args)
2862 ui.write("%s\n" % res1)
2863 ui.write("%s\n" % res1)
2863 if res1 != res2:
2864 if res1 != res2:
2864 ui.warn("%s\n" % res2)
2865 ui.warn("%s\n" % res2)
2865
2866
2866 def _parsewirelangblocks(fh):
2867 def _parsewirelangblocks(fh):
2867 activeaction = None
2868 activeaction = None
2868 blocklines = []
2869 blocklines = []
2869 lastindent = 0
2870 lastindent = 0
2870
2871
2871 for line in fh:
2872 for line in fh:
2872 line = line.rstrip()
2873 line = line.rstrip()
2873 if not line:
2874 if not line:
2874 continue
2875 continue
2875
2876
2876 if line.startswith(b'#'):
2877 if line.startswith(b'#'):
2877 continue
2878 continue
2878
2879
2879 if not line.startswith(b' '):
2880 if not line.startswith(b' '):
2880 # New block. Flush previous one.
2881 # New block. Flush previous one.
2881 if activeaction:
2882 if activeaction:
2882 yield activeaction, blocklines
2883 yield activeaction, blocklines
2883
2884
2884 activeaction = line
2885 activeaction = line
2885 blocklines = []
2886 blocklines = []
2886 lastindent = 0
2887 lastindent = 0
2887 continue
2888 continue
2888
2889
2889 # Else we start with an indent.
2890 # Else we start with an indent.
2890
2891
2891 if not activeaction:
2892 if not activeaction:
2892 raise error.Abort(_('indented line outside of block'))
2893 raise error.Abort(_('indented line outside of block'))
2893
2894
2894 indent = len(line) - len(line.lstrip())
2895 indent = len(line) - len(line.lstrip())
2895
2896
2896 # If this line is indented more than the last line, concatenate it.
2897 # If this line is indented more than the last line, concatenate it.
2897 if indent > lastindent and blocklines:
2898 if indent > lastindent and blocklines:
2898 blocklines[-1] += line.lstrip()
2899 blocklines[-1] += line.lstrip()
2899 else:
2900 else:
2900 blocklines.append(line)
2901 blocklines.append(line)
2901 lastindent = indent
2902 lastindent = indent
2902
2903
2903 # Flush last block.
2904 # Flush last block.
2904 if activeaction:
2905 if activeaction:
2905 yield activeaction, blocklines
2906 yield activeaction, blocklines
2906
2907
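# An illustrative sketch of what _parsewirelangblocks() yields for a tiny
# script: one (action, lines) pair per block, with indented lines kept
# attached to the preceding action. The helper name _exampleparseblocks is
# hypothetical.
def _exampleparseblocks():
    script = [
        b'command listkeys',
        b'    namespace bookmarks',
        b'flush',
    ]
    # -> [(b'command listkeys', [b'    namespace bookmarks']), (b'flush', [])]
    return list(_parsewirelangblocks(script))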
2907 @command('debugwireproto',
2908 @command('debugwireproto',
2908 [
2909 [
2909 ('', 'localssh', False, _('start an SSH server for this repo')),
2910 ('', 'localssh', False, _('start an SSH server for this repo')),
2910 ('', 'peer', '', _('construct a specific version of the peer')),
2911 ('', 'peer', '', _('construct a specific version of the peer')),
2911 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2912 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2912 ('', 'nologhandshake', False,
2913 ('', 'nologhandshake', False,
2913 _('do not log I/O related to the peer handshake')),
2914 _('do not log I/O related to the peer handshake')),
2914 ] + cmdutil.remoteopts,
2915 ] + cmdutil.remoteopts,
2915 _('[PATH]'),
2916 _('[PATH]'),
2916 optionalrepo=True)
2917 optionalrepo=True)
2917 def debugwireproto(ui, repo, path=None, **opts):
2918 def debugwireproto(ui, repo, path=None, **opts):
2918 """send wire protocol commands to a server
2919 """send wire protocol commands to a server
2919
2920
2920 This command can be used to issue wire protocol commands to remote
2921 This command can be used to issue wire protocol commands to remote
2921 peers and to debug the raw data being exchanged.
2922 peers and to debug the raw data being exchanged.
2922
2923
2923 ``--localssh`` will start an SSH server against the current repository
2924 ``--localssh`` will start an SSH server against the current repository
2924 and connect to that. By default, the connection will perform a handshake
2925 and connect to that. By default, the connection will perform a handshake
2925 and establish an appropriate peer instance.
2926 and establish an appropriate peer instance.
2926
2927
2927 ``--peer`` can be used to bypass the handshake protocol and construct a
2928 ``--peer`` can be used to bypass the handshake protocol and construct a
2928 peer instance using the specified class type. Valid values are ``raw``,
2929 peer instance using the specified class type. Valid values are ``raw``,
2929 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2930 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2930 raw data payloads and don't support higher-level command actions.
2931 raw data payloads and don't support higher-level command actions.
2931
2932
2932 ``--noreadstderr`` can be used to disable automatic reading from stderr
2933 ``--noreadstderr`` can be used to disable automatic reading from stderr
2933 of the peer (for SSH connections only). Disabling automatic reading of
2934 of the peer (for SSH connections only). Disabling automatic reading of
2934 stderr is useful for making output more deterministic.
2935 stderr is useful for making output more deterministic.
2935
2936
2936 Commands are issued via a mini language which is specified via stdin.
2937 Commands are issued via a mini language which is specified via stdin.
2937 The language consists of individual actions to perform. An action is
2938 The language consists of individual actions to perform. An action is
2938 defined by a block. A block is defined as a line with no leading
2939 defined by a block. A block is defined as a line with no leading
2939 space followed by 0 or more lines with leading space. Blocks are
2940 space followed by 0 or more lines with leading space. Blocks are
2940 effectively a high-level command with additional metadata.
2941 effectively a high-level command with additional metadata.
2941
2942
2942 Lines beginning with ``#`` are ignored.
2943 Lines beginning with ``#`` are ignored.
2943
2944
2944 The following sections denote available actions.
2945 The following sections denote available actions.
2945
2946
2946 raw
2947 raw
2947 ---
2948 ---
2948
2949
2949 Send raw data to the server.
2950 Send raw data to the server.
2950
2951
2951 The block payload contains the raw data to send as one atomic send
2952 The block payload contains the raw data to send as one atomic send
2952 operation. The data may not actually be delivered in a single system
2953 operation. The data may not actually be delivered in a single system
2953 call: it depends on the abilities of the transport being used.
2954 call: it depends on the abilities of the transport being used.
2954
2955
2955 Each line in the block is de-indented and concatenated. Then, that
2956 Each line in the block is de-indented and concatenated. Then, that
2956 value is evaluated as a Python b'' literal. This allows the use of
2957 value is evaluated as a Python b'' literal. This allows the use of
2957 backslash escaping, etc.
2958 backslash escaping, etc.
2958
2959
2959 raw+
2960 raw+
2960 ----
2961 ----
2961
2962
2962 Behaves like ``raw`` except flushes output afterwards.
2963 Behaves like ``raw`` except flushes output afterwards.
2963
2964
2964 command <X>
2965 command <X>
2965 -----------
2966 -----------
2966
2967
2967 Send a request to run a named command, whose name follows the ``command``
2968 Send a request to run a named command, whose name follows the ``command``
2968 string.
2969 string.
2969
2970
2970 Arguments to the command are defined as lines in this block. The format of
2971 Arguments to the command are defined as lines in this block. The format of
2971 each line is ``<key> <value>``. e.g.::
2972 each line is ``<key> <value>``. e.g.::
2972
2973
2973 command listkeys
2974 command listkeys
2974 namespace bookmarks
2975 namespace bookmarks
2975
2976
2976 If the value begins with ``eval:``, it will be interpreted as a Python
2977 If the value begins with ``eval:``, it will be interpreted as a Python
2977 literal expression. Otherwise values are interpreted as Python b'' literals.
2978 literal expression. Otherwise values are interpreted as Python b'' literals.
2978 This allows sending complex types and encoding special byte sequences via
2979 This allows sending complex types and encoding special byte sequences via
2979 backslash escaping.
2980 backslash escaping.
2980
2981
2981 The following arguments have special meaning:
2982 The following arguments have special meaning:
2982
2983
2983 ``PUSHFILE``
2984 ``PUSHFILE``
2984 When defined, the *push* mechanism of the peer will be used instead
2985 When defined, the *push* mechanism of the peer will be used instead
2985 of the static request-response mechanism and the content of the
2986 of the static request-response mechanism and the content of the
2986 file specified in the value of this argument will be sent as the
2987 file specified in the value of this argument will be sent as the
2987 command payload.
2988 command payload.
2988
2989
2989 This can be used to submit a local bundle file to the remote.
2990 This can be used to submit a local bundle file to the remote.
2990
2991
2991 batchbegin
2992 batchbegin
2992 ----------
2993 ----------
2993
2994
2994 Instruct the peer to begin a batched send.
2995 Instruct the peer to begin a batched send.
2995
2996
2996 All ``command`` blocks are queued for execution until the next
2997 All ``command`` blocks are queued for execution until the next
2997 ``batchsubmit`` block.
2998 ``batchsubmit`` block.
2998
2999
2999 batchsubmit
3000 batchsubmit
3000 -----------
3001 -----------
3001
3002
3002 Submit previously queued ``command`` blocks as a batch request.
3003 Submit previously queued ``command`` blocks as a batch request.
3003
3004
3004 This action MUST be paired with a ``batchbegin`` action.
3005 This action MUST be paired with a ``batchbegin`` action.
3005
3006
3006 httprequest <method> <path>
3007 httprequest <method> <path>
3007 ---------------------------
3008 ---------------------------
3008
3009
3009 (HTTP peer only)
3010 (HTTP peer only)
3010
3011
3011 Send an HTTP request to the peer.
3012 Send an HTTP request to the peer.
3012
3013
3013 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
3014 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
3014
3015
3015 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
3016 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
3016 headers to add to the request. e.g. ``Accept: foo``.
3017 headers to add to the request. e.g. ``Accept: foo``.
3017
3018
3018 The following arguments are special:
3019 The following arguments are special:
3019
3020
3020 ``BODYFILE``
3021 ``BODYFILE``
3021 The content of the file defined as the value to this argument will be
3022 The content of the file defined as the value to this argument will be
3022 transferred verbatim as the HTTP request body.
3023 transferred verbatim as the HTTP request body.
3023
3024
3024 ``frame <type> <flags> <payload>``
3025 ``frame <type> <flags> <payload>``
3025 Send a unified protocol frame as part of the request body.
3026 Send a unified protocol frame as part of the request body.
3026
3027
3027 All frames will be collected and sent as the body to the HTTP
3028 All frames will be collected and sent as the body to the HTTP
3028 request.
3029 request.
3029
3030
3030 close
3031 close
3031 -----
3032 -----
3032
3033
3033 Close the connection to the server.
3034 Close the connection to the server.
3034
3035
3035 flush
3036 flush
3036 -----
3037 -----
3037
3038
3038 Flush data written to the server.
3039 Flush data written to the server.
3039
3040
3040 readavailable
3041 readavailable
3041 -------------
3042 -------------
3042
3043
3043 Close the write end of the connection and read all available data from
3044 Close the write end of the connection and read all available data from
3044 the server.
3045 the server.
3045
3046
3046 If the connection to the server encompasses multiple pipes, we poll both
3047 If the connection to the server encompasses multiple pipes, we poll both
3047 pipes and read available data.
3048 pipes and read available data.
3048
3049
3049 readline
3050 readline
3050 --------
3051 --------
3051
3052
3052 Read a line of output from the server. If there are multiple output
3053 Read a line of output from the server. If there are multiple output
3053 pipes, reads only the main pipe.
3054 pipes, reads only the main pipe.
3054
3055
3055 ereadline
3056 ereadline
3056 ---------
3057 ---------
3057
3058
3058 Like ``readline``, but read from the stderr pipe, if available.
3059 Like ``readline``, but read from the stderr pipe, if available.
3059
3060
3060 read <X>
3061 read <X>
3061 --------
3062 --------
3062
3063
3063 ``read()`` N bytes from the server's main output pipe.
3064 ``read()`` N bytes from the server's main output pipe.
3064
3065
3065 eread <X>
3066 eread <X>
3066 ---------
3067 ---------
3067
3068
3068 ``read()`` N bytes from the server's stderr pipe, if available.
3069 ``read()`` N bytes from the server's stderr pipe, if available.
3069
3070
3070 Specifying Unified Frame-Based Protocol Frames
3071 Specifying Unified Frame-Based Protocol Frames
3071 ----------------------------------------------
3072 ----------------------------------------------
3072
3073
3073 It is possible to emit *Unified Frame-Based Protocol* frames by using
3074 It is possible to emit *Unified Frame-Based Protocol* frames by using
3074 special syntax.
3075 special syntax.
3075
3076
3076 A frame is composed as a type, flags, and payload. These can be parsed
3077 A frame is composed as a type, flags, and payload. These can be parsed
3077 from a string of the form:
3078 from a string of the form:
3078
3079
3079 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
3080 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
3080
3081
3081 ``request-id`` and ``stream-id`` are integers defining the request and
3082 ``request-id`` and ``stream-id`` are integers defining the request and
3082 stream identifiers.
3083 stream identifiers.
3083
3084
3084 ``type`` can be an integer value for the frame type or the string name
3085 ``type`` can be an integer value for the frame type or the string name
3085 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
3086 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
3086 ``command-name``.
3087 ``command-name``.
3087
3088
3088 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
3089 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
3089 components. Each component (and there can be just one) can be an integer
3090 components. Each component (and there can be just one) can be an integer
3090 or a flag name for stream flags or frame flags, respectively. Values are
3091 or a flag name for stream flags or frame flags, respectively. Values are
3091 resolved to integers and then bitwise OR'd together.
3092 resolved to integers and then bitwise OR'd together.
3092
3093
3093 ``payload`` represents the raw frame payload. If it begins with
3094 ``payload`` represents the raw frame payload. If it begins with
3094 ``cbor:``, the following string is evaluated as Python code and the
3095 ``cbor:``, the following string is evaluated as Python code and the
3095 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3096 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3096 as a Python byte string literal.
3097 as a Python byte string literal.
3097 """
3098 """
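    # An illustrative stdin script (a sketch; the command names are only
    # examples) combining the actions documented above: batch two commands,
    # submit them, then read whatever the server sent back.
    #
    #   batchbegin
    #   command heads
    #   command listkeys
    #       namespace bookmarks
    #   batchsubmit
    #   readavailable
    #   close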
3098 opts = pycompat.byteskwargs(opts)
3099 opts = pycompat.byteskwargs(opts)
3099
3100
3100 if opts['localssh'] and not repo:
3101 if opts['localssh'] and not repo:
3101 raise error.Abort(_('--localssh requires a repository'))
3102 raise error.Abort(_('--localssh requires a repository'))
3102
3103
3103 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
3104 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
3104 raise error.Abort(_('invalid value for --peer'),
3105 raise error.Abort(_('invalid value for --peer'),
3105 hint=_('valid values are "raw", "http2", "ssh1", and "ssh2"'))
3106 hint=_('valid values are "raw", "http2", "ssh1", and "ssh2"'))
3106
3107
3107 if path and opts['localssh']:
3108 if path and opts['localssh']:
3108 raise error.Abort(_('cannot specify --localssh with an explicit '
3109 raise error.Abort(_('cannot specify --localssh with an explicit '
3109 'path'))
3110 'path'))
3110
3111
3111 if ui.interactive():
3112 if ui.interactive():
3112 ui.write(_('(waiting for commands on stdin)\n'))
3113 ui.write(_('(waiting for commands on stdin)\n'))
3113
3114
3114 blocks = list(_parsewirelangblocks(ui.fin))
3115 blocks = list(_parsewirelangblocks(ui.fin))
3115
3116
3116 proc = None
3117 proc = None
3117 stdin = None
3118 stdin = None
3118 stdout = None
3119 stdout = None
3119 stderr = None
3120 stderr = None
3120 opener = None
3121 opener = None
3121
3122
3122 if opts['localssh']:
3123 if opts['localssh']:
3123 # We start the SSH server in its own process so there is process
3124 # We start the SSH server in its own process so there is process
3124 # separation. This prevents a whole class of potential bugs around
3125 # separation. This prevents a whole class of potential bugs around
3125 # shared state from interfering with server operation.
3126 # shared state from interfering with server operation.
3126 args = procutil.hgcmd() + [
3127 args = procutil.hgcmd() + [
3127 '-R', repo.root,
3128 '-R', repo.root,
3128 'debugserve', '--sshstdio',
3129 'debugserve', '--sshstdio',
3129 ]
3130 ]
3130 proc = subprocess.Popen(pycompat.rapply(procutil.tonativestr, args),
3131 proc = subprocess.Popen(pycompat.rapply(procutil.tonativestr, args),
3131 stdin=subprocess.PIPE,
3132 stdin=subprocess.PIPE,
3132 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
3133 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
3133 bufsize=0)
3134 bufsize=0)
3134
3135
3135 stdin = proc.stdin
3136 stdin = proc.stdin
3136 stdout = proc.stdout
3137 stdout = proc.stdout
3137 stderr = proc.stderr
3138 stderr = proc.stderr
3138
3139
3139 # We turn the pipes into observers so we can log I/O.
3140 # We turn the pipes into observers so we can log I/O.
3140 if ui.verbose or opts['peer'] == 'raw':
3141 if ui.verbose or opts['peer'] == 'raw':
3141 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
3142 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
3142 logdata=True)
3143 logdata=True)
3143 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
3144 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
3144 logdata=True)
3145 logdata=True)
3145 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
3146 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
3146 logdata=True)
3147 logdata=True)
3147
3148
3148 # --localssh also implies the peer connection settings.
3149 # --localssh also implies the peer connection settings.
3149
3150
3150 url = 'ssh://localserver'
3151 url = 'ssh://localserver'
3151 autoreadstderr = not opts['noreadstderr']
3152 autoreadstderr = not opts['noreadstderr']
3152
3153
3153 if opts['peer'] == 'ssh1':
3154 if opts['peer'] == 'ssh1':
3154 ui.write(_('creating ssh peer for wire protocol version 1\n'))
3155 ui.write(_('creating ssh peer for wire protocol version 1\n'))
3155 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
3156 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
3156 None, autoreadstderr=autoreadstderr)
3157 None, autoreadstderr=autoreadstderr)
3157 elif opts['peer'] == 'ssh2':
3158 elif opts['peer'] == 'ssh2':
3158 ui.write(_('creating ssh peer for wire protocol version 2\n'))
3159 ui.write(_('creating ssh peer for wire protocol version 2\n'))
3159 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
3160 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
3160 None, autoreadstderr=autoreadstderr)
3161 None, autoreadstderr=autoreadstderr)
3161 elif opts['peer'] == 'raw':
3162 elif opts['peer'] == 'raw':
3162 ui.write(_('using raw connection to peer\n'))
3163 ui.write(_('using raw connection to peer\n'))
3163 peer = None
3164 peer = None
3164 else:
3165 else:
3165 ui.write(_('creating ssh peer from handshake results\n'))
3166 ui.write(_('creating ssh peer from handshake results\n'))
3166 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
3167 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
3167 autoreadstderr=autoreadstderr)
3168 autoreadstderr=autoreadstderr)
3168
3169
3169 elif path:
3170 elif path:
3170 # We bypass hg.peer() so we can proxy the sockets.
3171 # We bypass hg.peer() so we can proxy the sockets.
3171 # TODO consider not doing this because we skip
3172 # TODO consider not doing this because we skip
3172 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
3173 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
3173 u = util.url(path)
3174 u = util.url(path)
3174 if u.scheme != 'http':
3175 if u.scheme != 'http':
3175 raise error.Abort(_('only http:// paths are currently supported'))
3176 raise error.Abort(_('only http:// paths are currently supported'))
3176
3177
3177 url, authinfo = u.authinfo()
3178 url, authinfo = u.authinfo()
3178 openerargs = {
3179 openerargs = {
3179 r'useragent': b'Mercurial debugwireproto',
3180 r'useragent': b'Mercurial debugwireproto',
3180 }
3181 }
3181
3182
3182 # Turn pipes/sockets into observers so we can log I/O.
3183 # Turn pipes/sockets into observers so we can log I/O.
3183 if ui.verbose:
3184 if ui.verbose:
3184 openerargs.update({
3185 openerargs.update({
3185 r'loggingfh': ui,
3186 r'loggingfh': ui,
3186 r'loggingname': b's',
3187 r'loggingname': b's',
3187 r'loggingopts': {
3188 r'loggingopts': {
3188 r'logdata': True,
3189 r'logdata': True,
3189 r'logdataapis': False,
3190 r'logdataapis': False,
3190 },
3191 },
3191 })
3192 })
3192
3193
3193 if ui.debugflag:
3194 if ui.debugflag:
3194 openerargs[r'loggingopts'][r'logdataapis'] = True
3195 openerargs[r'loggingopts'][r'logdataapis'] = True
3195
3196
3196 # Don't send default headers when in raw mode. This allows us to
3197 # Don't send default headers when in raw mode. This allows us to
3197 # bypass most of the behavior of our URL handling code so we can
3198 # bypass most of the behavior of our URL handling code so we can
3198 # have near complete control over what's sent on the wire.
3199 # have near complete control over what's sent on the wire.
3199 if opts['peer'] == 'raw':
3200 if opts['peer'] == 'raw':
3200 openerargs[r'sendaccept'] = False
3201 openerargs[r'sendaccept'] = False
3201
3202
3202 opener = urlmod.opener(ui, authinfo, **openerargs)
3203 opener = urlmod.opener(ui, authinfo, **openerargs)
3203
3204
3204 if opts['peer'] == 'http2':
3205 if opts['peer'] == 'http2':
3205 ui.write(_('creating http peer for wire protocol version 2\n'))
3206 ui.write(_('creating http peer for wire protocol version 2\n'))
3206 # We go through makepeer() because we need an API descriptor for
3207 # We go through makepeer() because we need an API descriptor for
3207 # the peer instance to be useful.
3208 # the peer instance to be useful.
3208 with ui.configoverride({
3209 with ui.configoverride({
3209 ('experimental', 'httppeer.advertise-v2'): True}):
3210 ('experimental', 'httppeer.advertise-v2'): True}):
3210 if opts['nologhandshake']:
3211 if opts['nologhandshake']:
3211 ui.pushbuffer()
3212 ui.pushbuffer()
3212
3213
3213 peer = httppeer.makepeer(ui, path, opener=opener)
3214 peer = httppeer.makepeer(ui, path, opener=opener)
3214
3215
3215 if opts['nologhandshake']:
3216 if opts['nologhandshake']:
3216 ui.popbuffer()
3217 ui.popbuffer()
3217
3218
3218 if not isinstance(peer, httppeer.httpv2peer):
3219 if not isinstance(peer, httppeer.httpv2peer):
3219 raise error.Abort(_('could not instantiate HTTP peer for '
3220 raise error.Abort(_('could not instantiate HTTP peer for '
3220 'wire protocol version 2'),
3221 'wire protocol version 2'),
3221 hint=_('the server may not have the feature '
3222 hint=_('the server may not have the feature '
3222 'enabled or is not allowing this '
3223 'enabled or is not allowing this '
3223 'client version'))
3224 'client version'))
3224
3225
3225 elif opts['peer'] == 'raw':
3226 elif opts['peer'] == 'raw':
3226 ui.write(_('using raw connection to peer\n'))
3227 ui.write(_('using raw connection to peer\n'))
3227 peer = None
3228 peer = None
3228 elif opts['peer']:
3229 elif opts['peer']:
3229 raise error.Abort(_('--peer %s not supported with HTTP peers') %
3230 raise error.Abort(_('--peer %s not supported with HTTP peers') %
3230 opts['peer'])
3231 opts['peer'])
3231 else:
3232 else:
3232 peer = httppeer.makepeer(ui, path, opener=opener)
3233 peer = httppeer.makepeer(ui, path, opener=opener)
3233
3234
3234 # We /could/ populate stdin/stdout with sock.makefile()...
3235 # We /could/ populate stdin/stdout with sock.makefile()...
3235 else:
3236 else:
3236 raise error.Abort(_('unsupported connection configuration'))
3237 raise error.Abort(_('unsupported connection configuration'))
3237
3238
3238 batchedcommands = None
3239 batchedcommands = None
3239
3240
3240 # Now perform actions based on the parsed wire language instructions.
3241 # Now perform actions based on the parsed wire language instructions.
3241 for action, lines in blocks:
3242 for action, lines in blocks:
3242 if action in ('raw', 'raw+'):
3243 if action in ('raw', 'raw+'):
3243 if not stdin:
3244 if not stdin:
3244 raise error.Abort(_('cannot call raw/raw+ on this peer'))
3245 raise error.Abort(_('cannot call raw/raw+ on this peer'))
3245
3246
3246 # Concatenate the data together.
3247 # Concatenate the data together.
3247 data = ''.join(l.lstrip() for l in lines)
3248 data = ''.join(l.lstrip() for l in lines)
3248 data = stringutil.unescapestr(data)
3249 data = stringutil.unescapestr(data)
3249 stdin.write(data)
3250 stdin.write(data)
3250
3251
3251 if action == 'raw+':
3252 if action == 'raw+':
3252 stdin.flush()
3253 stdin.flush()
3253 elif action == 'flush':
3254 elif action == 'flush':
3254 if not stdin:
3255 if not stdin:
3255 raise error.Abort(_('cannot call flush on this peer'))
3256 raise error.Abort(_('cannot call flush on this peer'))
3256 stdin.flush()
3257 stdin.flush()
3257 elif action.startswith('command'):
3258 elif action.startswith('command'):
3258 if not peer:
3259 if not peer:
3259 raise error.Abort(_('cannot send commands unless peer instance '
3260 raise error.Abort(_('cannot send commands unless peer instance '
3260 'is available'))
3261 'is available'))
3261
3262
3262 command = action.split(' ', 1)[1]
3263 command = action.split(' ', 1)[1]
3263
3264
3264 args = {}
3265 args = {}
3265 for line in lines:
3266 for line in lines:
3266 # We need to allow empty values.
3267 # We need to allow empty values.
3267 fields = line.lstrip().split(' ', 1)
3268 fields = line.lstrip().split(' ', 1)
3268 if len(fields) == 1:
3269 if len(fields) == 1:
3269 key = fields[0]
3270 key = fields[0]
3270 value = ''
3271 value = ''
3271 else:
3272 else:
3272 key, value = fields
3273 key, value = fields
3273
3274
3274 if value.startswith('eval:'):
3275 if value.startswith('eval:'):
3275 value = stringutil.evalpythonliteral(value[5:])
3276 value = stringutil.evalpythonliteral(value[5:])
3276 else:
3277 else:
3277 value = stringutil.unescapestr(value)
3278 value = stringutil.unescapestr(value)
3278
3279
3279 args[key] = value
3280 args[key] = value
3280
3281
3281 if batchedcommands is not None:
3282 if batchedcommands is not None:
3282 batchedcommands.append((command, args))
3283 batchedcommands.append((command, args))
3283 continue
3284 continue
3284
3285
3285 ui.status(_('sending %s command\n') % command)
3286 ui.status(_('sending %s command\n') % command)
3286
3287
3287 if 'PUSHFILE' in args:
3288 if 'PUSHFILE' in args:
3288 with open(args['PUSHFILE'], r'rb') as fh:
3289 with open(args['PUSHFILE'], r'rb') as fh:
3289 del args['PUSHFILE']
3290 del args['PUSHFILE']
3290 res, output = peer._callpush(command, fh,
3291 res, output = peer._callpush(command, fh,
3291 **pycompat.strkwargs(args))
3292 **pycompat.strkwargs(args))
3292 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3293 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3293 ui.status(_('remote output: %s\n') %
3294 ui.status(_('remote output: %s\n') %
3294 stringutil.escapestr(output))
3295 stringutil.escapestr(output))
3295 else:
3296 else:
3296 with peer.commandexecutor() as e:
3297 with peer.commandexecutor() as e:
3297 res = e.callcommand(command, args).result()
3298 res = e.callcommand(command, args).result()
3298
3299
3299 if isinstance(res, wireprotov2peer.commandresponse):
3300 if isinstance(res, wireprotov2peer.commandresponse):
3300 val = res.objects()
3301 val = res.objects()
3301 ui.status(_('response: %s\n') %
3302 ui.status(_('response: %s\n') %
3302 stringutil.pprint(val, bprefix=True, indent=2))
3303 stringutil.pprint(val, bprefix=True, indent=2))
3303 else:
3304 else:
3304 ui.status(_('response: %s\n') %
3305 ui.status(_('response: %s\n') %
3305 stringutil.pprint(res, bprefix=True, indent=2))
3306 stringutil.pprint(res, bprefix=True, indent=2))
3306
3307
3307 elif action == 'batchbegin':
3308 elif action == 'batchbegin':
3308 if batchedcommands is not None:
3309 if batchedcommands is not None:
3309 raise error.Abort(_('nested batchbegin not allowed'))
3310 raise error.Abort(_('nested batchbegin not allowed'))
3310
3311
3311 batchedcommands = []
3312 batchedcommands = []
3312 elif action == 'batchsubmit':
3313 elif action == 'batchsubmit':
3313 # There is a batching API we could go through. But it would be
3314 # There is a batching API we could go through. But it would be
3314 # difficult to normalize requests into function calls. It is easier
3315 # difficult to normalize requests into function calls. It is easier
3315 # to bypass this layer and normalize to commands + args.
3316 # to bypass this layer and normalize to commands + args.
3316 ui.status(_('sending batch with %d sub-commands\n') %
3317 ui.status(_('sending batch with %d sub-commands\n') %
3317 len(batchedcommands))
3318 len(batchedcommands))
3318 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3319 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3319 ui.status(_('response #%d: %s\n') %
3320 ui.status(_('response #%d: %s\n') %
3320 (i, stringutil.escapestr(chunk)))
3321 (i, stringutil.escapestr(chunk)))
3321
3322
3322 batchedcommands = None
3323 batchedcommands = None
3323
3324
3324 elif action.startswith('httprequest '):
3325 elif action.startswith('httprequest '):
3325 if not opener:
3326 if not opener:
3326 raise error.Abort(_('cannot use httprequest without an HTTP '
3327 raise error.Abort(_('cannot use httprequest without an HTTP '
3327 'peer'))
3328 'peer'))
3328
3329
3329 request = action.split(' ', 2)
3330 request = action.split(' ', 2)
3330 if len(request) != 3:
3331 if len(request) != 3:
3331 raise error.Abort(_('invalid httprequest: expected format is '
3332 raise error.Abort(_('invalid httprequest: expected format is '
3332 '"httprequest <method> <path>'))
3333 '"httprequest <method> <path>'))
3333
3334
3334 method, httppath = request[1:]
3335 method, httppath = request[1:]
3335 headers = {}
3336 headers = {}
3336 body = None
3337 body = None
3337 frames = []
3338 frames = []
3338 for line in lines:
3339 for line in lines:
3339 line = line.lstrip()
3340 line = line.lstrip()
3340 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3341 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3341 if m:
3342 if m:
3342 # Headers need to use native strings.
3343 # Headers need to use native strings.
3343 key = pycompat.strurl(m.group(1))
3344 key = pycompat.strurl(m.group(1))
3344 value = pycompat.strurl(m.group(2))
3345 value = pycompat.strurl(m.group(2))
3345 headers[key] = value
3346 headers[key] = value
3346 continue
3347 continue
3347
3348
3348 if line.startswith(b'BODYFILE '):
3349 if line.startswith(b'BODYFILE '):
3349 with open(line.split(b' ', 1)[1], 'rb') as fh:
3350 with open(line.split(b' ', 1)[1], 'rb') as fh:
                        body = fh.read()
                elif line.startswith(b'frame '):
                    frame = wireprotoframing.makeframefromhumanstring(
                        line[len(b'frame '):])

                    frames.append(frame)
                else:
                    raise error.Abort(_('unknown argument to httprequest: %s') %
                                      line)

            url = path + httppath

            if frames:
                body = b''.join(bytes(f) for f in frames)

            req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)

            # urllib.Request insists on using has_data() as a proxy for
            # determining the request method. Override that to use our
            # explicitly requested method.
            req.get_method = lambda: pycompat.sysstr(method)

            try:
                res = opener.open(req)
                body = res.read()
            except util.urlerr.urlerror as e:
                # read() method must be called, but only exists in Python 2
                getattr(e, 'read', lambda: None)()
                continue

            ct = res.headers.get(r'Content-Type')
            if ct == r'application/mercurial-cbor':
                ui.write(_('cbor> %s\n') %
                         stringutil.pprint(cborutil.decodeall(body),
                                           bprefix=True,
                                           indent=2))

        elif action == 'close':
            peer.close()
        elif action == 'readavailable':
            if not stdout or not stderr:
                raise error.Abort(_('readavailable not available on this peer'))

            stdin.close()
            stdout.read()
            stderr.read()

        elif action == 'readline':
            if not stdout:
                raise error.Abort(_('readline not available on this peer'))
            stdout.readline()
        elif action == 'ereadline':
            if not stderr:
                raise error.Abort(_('ereadline not available on this peer'))
            stderr.readline()
        elif action.startswith('read '):
            count = int(action.split(' ', 1)[1])
            if not stdout:
                raise error.Abort(_('read not available on this peer'))
            stdout.read(count)
        elif action.startswith('eread '):
            count = int(action.split(' ', 1)[1])
            if not stderr:
                raise error.Abort(_('eread not available on this peer'))
            stderr.read(count)
        else:
            raise error.Abort(_('unknown action: %s') % action)

    if batchedcommands is not None:
        raise error.Abort(_('unclosed "batchbegin" request'))

    if peer:
        peer.close()

    if proc:
        proc.kill()
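The Python excerpt above is the tail of the action loop of `hg debugwireproto`: the command reads a small script of actions (`httprequest`, `close`, `readavailable`, `readline`, `read N`, and so on) from standard input and replays them against a peer. As a rough illustration only, a script driving the `httprequest` action against a locally served repository might look like the following; the `--peer raw` flag, the `$HGPORT` variable, and the header value are assumptions for the sketch, not part of this change:

  $ hg serve -p $HGPORT -d --pid-file hg.pid
  $ hg --verbose debugwireproto --peer raw http://localhost:$HGPORT/ << EOF
  > httprequest GET ?cmd=capabilities
  >     user-agent: test
  > EOF

A `BODYFILE <path>` line would attach a request body read from the named file, and `frame <spec>` lines would instead assemble a wire protocol frame payload, matching the branches of the parser above.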
@@ -1,121 +1,118 @@
Source bundle was generated with the following script:

# hg init
# echo a > a
# ln -s a l
# hg ci -Ama -d'0 0'
# mkdir b
# echo a > b/a
# chmod +x b/a
# hg ci -Amb -d'1 0'

  $ hg init
  $ hg unbundle "$TESTDIR/bundles/test-manifest.hg"
  adding changesets
  adding manifests
  adding file changes
  added 2 changesets with 3 changes to 3 files
  new changesets b73562a03cfe:5bdc995175ba (2 drafts)
  (run 'hg update' to get a working copy)

The next call is expected to return nothing:

  $ hg manifest

  $ hg co
  3 files updated, 0 files merged, 0 files removed, 0 files unresolved

  $ hg manifest
  a
  b/a
  l

  $ hg files -vr .
  2 a
  2 x b/a
  1 l l
  $ hg files -r . -X b
  a
  l
  $ hg files -T '{path} {size} {flags}\n'
  a 2
  b/a 2 x
  l 1 l
  $ hg files -T '{path} {node|shortest}\n' -r.
  a 5bdc
  b/a 5bdc
  l 5bdc

  $ hg manifest -v
  644 a
  755 * b/a
  644 @ l
  $ hg manifest -T '{path} {rev}\n'
  a 1
  b/a 1
  l 1

  $ hg manifest --debug
  b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 644 a
  b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 755 * b/a
  047b75c6d7a3ef6a2243bd0e99f94f6ea6683597 644 @ l

  $ hg manifest -r 0
  a
  l

  $ hg manifest -r 1
  a
  b/a
  l

  $ hg manifest -r tip
  a
  b/a
  l

  $ hg manifest tip
  a
  b/a
  l

  $ hg manifest --all
  a
  b/a
  l

The next two calls are expected to abort:

  $ hg manifest -r 2
  abort: unknown revision '2'!
  [255]

  $ hg manifest -r tip tip
  abort: please specify just one revision
  [255]

Testing the manifest full text cache utility
--------------------------------------------

Reminder of the manifest log content

  $ hg log --debug | grep 'manifest:'
  manifest: 1:1e01206b1d2f72bd55f2a33fa8ccad74144825b7
  manifest: 0:fce2a30dedad1eef4da95ca1dc0004157aa527cf

Showing the content of the caches after the above operations

  $ hg debugmanifestfulltextcache
  cache empty

Adding a new persistent entry in the cache

  $ hg debugmanifestfulltextcache --add 1e01206b1d2f72bd55f2a33fa8ccad74144825b7
-  cache contains 1 manifest entries, in order of most to least recent:
-  id: 1e01206b1d2f72bd55f2a33fa8ccad74144825b7, size 133 bytes
-  total cache data size 157 bytes, on-disk 157 bytes

  $ hg debugmanifestfulltextcache
  cache contains 1 manifest entries, in order of most to least recent:
  id: 1e01206b1d2f72bd55f2a33fa8ccad74144825b7, size 133 bytes
  total cache data size 157 bytes, on-disk 157 bytes
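The behaviour change recorded in this hunk is that `--add` no longer prints the cache contents; a plain `hg debugmanifestfulltextcache` call is now the way to inspect them, as the retained lines show. A minimal sketch of resetting the cache afterwards, assuming the command's `--clear` flag and assuming that clearing produces no output, would be:

  $ hg debugmanifestfulltextcache --clear
  $ hg debugmanifestfulltextcache
  cache empty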