py3: use raw strings and %d for formatting...
Gregory Szorc
r41820:34ae00a1 default
@@ -1,3398 +1,3398 @@
# debugcommands.py - command processing for debug* commands
#
# Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import codecs
import collections
import difflib
import errno
import operator
import os
import random
import re
import socket
import ssl
import stat
import string
import subprocess
import sys
import time

from .i18n import _
from .node import (
    bin,
    hex,
    nullhex,
    nullid,
    nullrev,
    short,
)
from . import (
    bundle2,
    changegroup,
    cmdutil,
    color,
    context,
    copies,
    dagparser,
    encoding,
    error,
    exchange,
    extensions,
    filemerge,
    filesetlang,
    formatter,
    hg,
    httppeer,
    localrepo,
    lock as lockmod,
    logcmdutil,
    merge as mergemod,
    obsolete,
    obsutil,
    phases,
    policy,
    pvec,
    pycompat,
    registrar,
    repair,
    revlog,
    revset,
    revsetlang,
    scmutil,
    setdiscovery,
    simplemerge,
    sshpeer,
    sslutil,
    streamclone,
    templater,
    treediscovery,
    upgrade,
    url as urlmod,
    util,
    vfs as vfsmod,
    wireprotoframing,
    wireprotoserver,
    wireprotov2peer,
)
from .utils import (
    cborutil,
    dateutil,
    procutil,
    stringutil,
)

from .revlogutils import (
    deltas as deltautil
)

release = lockmod.release

command = registrar.command()

@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    if len(args) == 3:
        index, rev1, rev2 = args
        r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = r.lookup
    elif len(args) == 2:
        if not repo:
            raise error.Abort(_('there is no Mercurial repository here '
                                '(.hg not found)'))
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_('either two or three arguments required'))
    a = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write('%d:%s\n' % (r.rev(a), hex(a)))

@command('debugapplystreamclonebundle', [], 'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    f = hg.openpath(ui, fname)
    gen = exchange.readbundle(ui, f, fname)
    gen.apply(repo)

@command('debugbuilddag',
    [('m', 'mergeable-file', None, _('add single file mergeable changes')),
    ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
    ('n', 'new-file', None, _('add new file at each rev'))],
    _('[OPTION]... [TEXT]'))
def debugbuilddag(ui, repo, text=None,
                  mergeable_file=False,
                  overwritten_file=False,
                  new_file=False):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
          otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_("reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_('repository is not empty'))

    # determine number of revs in DAG
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == 'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = ['%d' % i
                              for i in pycompat.xrange(0, total * linesperrev)]
        initialmergedlines.append("")

    tags = []
    progress = ui.makeprogress(_('building'), unit=_('revisions'),
                               total=total)
    with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
        at = -1
        atbranch = 'default'
        nodeids = []
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == 'n':
                ui.note(('node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = "mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [x[fn].data() for x in (pa, p1,
                                                                     p2)]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append("")
                    elif at > 0:
                        ml = p1[fn].data().split("\n")
                    else:
                        ml = initialmergedlines
                    ml[id * linesperrev] += " r%i" % id
                    mergedtext = "\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = "of"
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id

                if new_file:
                    fn = "nf%i" % id
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith("nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(repo, cx, path,
                                                  filecontent[path])
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
                                    date=(id, 0),
                                    user="debugbuilddag",
                                    extra={'branch': atbranch})
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == 'l':
                id, name = data
                ui.note(('tag %s\n' % name))
                tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == 'a':
                ui.note(('branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write("localtags", "".join(tags))

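# Illustrative usage sketch, not part of the upstream file: reading the
# grammar documented in the docstring above, a DAG text such as the one
# below would build a run of three revisions whose tip is tagged "base",
# two more ending in "feature", two revisions forked back off "base", and
# finally a merge of that fork with "feature". The exact shape follows from
# the element semantics above and is given here as a hedged example only.
#
#   $ hg init dagdemo && cd dagdemo
#   $ hg debugbuilddag '+3:base +2:feature <base +2 /feature'
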
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    indent_string = ' ' * indent
    if all:
        ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
                 % indent_string)

        def showchunks(named):
            ui.write("\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write("%s%s %s %s %s %s %d\n" %
                         (indent_string, hex(node), hex(p1), hex(p2),
                          hex(cs), hex(deltabase), len(delta)))

        chunkdata = gen.changelogheader()
        showchunks("changelog")
        chunkdata = gen.manifestheader()
        showchunks("manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata['filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_('use debugbundle2 for this file'))
        chunkdata = gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write("%s%s\n" % (indent_string, hex(node)))

def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = ' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        msg = "%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = "%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        fm = ui.formatter('debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()

def _debugphaseheads(ui, data, indent=0):
    """display version and markers contained in 'data'"""
    indent_string = ' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(indent_string)
            ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))

def _quasirepr(thing):
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        return '{%s}' % (
            b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
    return pycompat.bytestr(repr(thing))

def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_('not a bundle2 file'))
    ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
    parttypes = opts.get(r'part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = '%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        if part.type == 'changegroup':
            version = part.params.get('version', '01')
            cg = changegroup.getunbundler(version, part, 'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == 'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == 'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)

@command('debugbundle',
    [('a', 'all', None, _('show all details')),
    ('', 'part-type', [], _('show only the named part type')),
    ('', 'spec', None, _('print the bundlespec of the bundle'))],
    _('FILE'),
    norepo=True)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            spec = exchange.getbundlespec(ui, f)
            ui.write('%s\n' % spec)
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)

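# Illustrative usage sketch (assumption, not from the original file): given a
# bundle file produced elsewhere, the options defined in the command table
# above can be exercised as follows.
#
#   $ hg debugbundle bundle.hg           # list parts / changeset nodes
#   $ hg debugbundle --all bundle.hg     # per-delta detail (id, p1, p2, ...)
#   $ hg debugbundle --spec bundle.hg    # print only the bundlespec
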
@command('debugcapabilities',
    [], _('PATH'),
    norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    caps = peer.capabilities()
    ui.write(('Main capabilities:\n'))
    for c in sorted(caps):
        ui.write((' %s\n') % c)
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.write(('Bundle2 capabilities:\n'))
        for key, values in sorted(b2caps.iteritems()):
            ui.write((' %s\n') % key)
            for v in values:
                ui.write((' %s\n') % v)

@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        error = _(".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(error)

@command('debugcolor',
    [('', 'style', None, _('show all configured styles'))],
    'hg debugcolor')
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
    if opts.get(r'style'):
        return _debugdisplaystyle(ui)
    else:
        return _debugdisplaycolor(ui)

def _debugdisplaycolor(ui):
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for k, v in ui.configitems('color'):
            if k.startswith('color.'):
                ui._styles[k] = k[6:]
            elif k.startswith('terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_('available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(),
                   key=lambda i: ('_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(('%s\n') % colorname, label=label)

def _debugdisplaystyle(ui):
    ui.write(_('available style:\n'))
    if not ui._styles:
        return
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write('%s' % label, label=label)
        if effects:
            # 50
            ui.write(': ')
            ui.write(' ' * (max(0, width - len(label))))
            ui.write(', '.join(ui.label(e, e) for e in effects.split()))
        ui.write('\n')

@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(_('(warning: stream clone bundle will contain secret '
                  'revisions)\n'))

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))

@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
    ('b', 'branches', None, _('annotate with branch names')),
    ('', 'dots', None, _('use dots for runs')),
    ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get(r'spaces')
    dots = opts.get(r'dots')
    if file_:
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False),
                             file_)
        revs = set((int(r) for r in revs))
        def events():
            for r in rlog:
                yield 'n', (r, list(p for p in rlog.parentrevs(r)
                                    if p != -1))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        cl = repo.changelog
        tags = opts.get(r'tags')
        branches = opts.get(r'branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            b = "default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(p for p in cl.parentrevs(r)
                                    if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
        ui.write("\n")

@command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    r = cmdutil.openstorage(repo, 'debugdata', file_, opts)
    try:
        ui.write(r.revision(r.lookup(rev), raw=True))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)

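# Illustrative usage sketch (assumption): per the '-c|-m|FILE REV' synopsis
# above, the raw stored data of a revision can be dumped from the changelog,
# the manifest, or a file revlog, e.g.:
#
#   $ hg debugdata -c 0        # raw changelog entry of revision 0
#   $ hg debugdata FILE REV    # raw data of FILE at revision REV
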
@command('debugdate',
    [('e', 'extended', None, _('try extended date formats'))],
    _('[-e] DATE [RANGE]'),
    norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts[r"extended"]:
        d = dateutil.parsedate(date, util.extendeddateformats)
    else:
        d = dateutil.parsedate(date)
    ui.write(("internal: %d %d\n") % d)
    ui.write(("standard: %s\n") % dateutil.datestr(d))
    if range:
        m = dateutil.matchdate(range)
        ui.write(("match: %s\n") % m(d[0]))

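# Illustrative usage sketch (assumption): following the code above, the
# command prints the parsed date as "internal: <unixtime> <tzoffset>" and
# "standard: <formatted date>", adding a "match: ..." line when a RANGE
# argument is given, e.g.:
#
#   $ hg debugdate '2006-02-01 13:00:30'
#   $ hg debugdate -e DATE    # also try the extended date formats
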
@command('debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``:       revision number
    :``chainid``:   delta chain identifier (numbered by unique base)
    :``chainlen``:  delta chain length to this revision
    :``prevrev``:   previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``:  compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``:   linear distance from base revision in delta chain to end
                    of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``:     total size of data read from the disk for a revision
                       (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``:  density of useful bytes in the data read from the disk
    :``srchunks``:     in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            if e[3] == e[5]:
                deltatype = 'p1'
            elif e[3] == e[6]:
                deltatype = 'p2'
            elif e[3] == rev - 1:
                deltatype = 'prev'
            elif e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            if e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    fm.plain(' rev chain# chainlen prev delta '
             'size rawsize chainsize ratio lindist extradist '
             'extraratio')
    if withsparseread:
        fm.plain(' readsize largestblk rddensity srchunks')
    fm.plain('\n')

    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write('readsize largestblock readdensity srchunks',
                     ' %10d %10d %9.5f %8d',
                     readsize, largestblock, readdensity, srchunks,
                     readsize=readsize, largestblock=largestblock,
                     readdensity=readdensity, srchunks=srchunks)

        fm.plain('\n')

    fm.end()

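# Illustrative usage sketch (assumption): because the output goes through a
# formatter, the template keywords listed in the docstring above can be
# selected with -T, for example to inspect manifest delta chains:
#
#   $ hg debugdeltachain -m -T '{rev} {chainid} {chainlen} {deltatype}\n'
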
@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime (DEPRECATED)')),
    ('', 'dates', True, _('display the saved mtime')),
    ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = not opts[r'dates']
    if opts.get(r'nodates') is not None:
        nodates = True
    datesort = opts.get(r'datesort')

    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        if ent[3] == -1:
            timestr = 'unset '
        elif nodates:
            timestr = 'set '
        else:
            timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
                                    time.localtime(ent[3]))
            timestr = encoding.strtolocal(timestr)
        if ent[1] & 0o20000:
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))

@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
    ('', 'nonheads', None,
     _('use old-style discovery with non-heads included')),
    ('', 'rev', [], 'restrict discovery to this set of revs'),
    ] + cmdutil.remoteopts,
    _('[--rev REV] [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(pushedrevs, remoteheads, remote=remote):
        if opts.get('old'):
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))

                clnode = repo.changelog.node
                common = repo.revs('heads(::%ln)', common)
                common = {clnode(r) for r in common}
        else:
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
                                                            ancestorsof=nodes)
        common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
        if lheads <= common:
            ui.write(("local is subset\n"))
        elif rheads <= common:
            ui.write(("remote is subset\n"))

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts['rev']
    doit(localrevs, remoterevs)

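# Illustrative usage sketch (assumption): discovery can be exercised against
# any configured path, optionally restricted to local revisions or forced
# through the legacy algorithm via the options declared above:
#
#   $ hg debugdiscovery                      # compare with the 'default' path
#   $ hg debugdiscovery --rev . OTHER
#   $ hg debugdiscovery --old OTHER          # old-style discovery
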
_chunksize = 4 << 10

@command('debugdownload',
    [
        ('o', 'output', '', _('path')),
    ],
    optionalrepo=True)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config
    """
    fh = urlmod.open(ui, url, output)

    dest = ui
    if output:
        dest = open(output, "wb", _chunksize)
    try:
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        if output:
            dest.close()

@command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter('debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = pycompat.fsencode(extmod.__file__)
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', '').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write('name', '%s\n', extname)
        else:
            fm.write('name', '%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain('\n')
            elif not exttestedwith:
                fm.plain(_(' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(' (%s!)\n' % lasttestedversion)

        fm.condwrite(ui.verbose and extsource, 'source',
                     _(' location: %s\n'), extsource or "")

        if ui.verbose:
            fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
                     _(' tested with: %s\n'),
                     fm.formatlist(exttestedwith, name='ver'))

        fm.condwrite(ui.verbose and extbuglink, 'buglink',
                     _(' bug reporting: %s\n'), extbuglink or "")

    fm.end()

@command('debugfileset',
    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
     ('', 'all-files', False,
      _('test files from all revisions and working directory')),
     ('s', 'show-matcher', None,
      _('print internal representation of matcher')),
     ('p', 'show-stage', [],
      _('print parsed tree at the given stage'), _('NAME'))],
    _('[-r REV] [--all-files] [OPTION]... FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset
    fileset.symbols # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)

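    # each stage below rewrites the tree produced by the previous one;
    # --show-stage selects which of these intermediate trees get printed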
    stages = [
        ('parsed', pycompat.identity),
        ('analyzed', filesetlang.analyze),
        ('optimized', filesetlang.optimize),
    ]
    stagenames = set(n for n, f in stages)

    showalways = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(filesetlang.prettyformat(tree), "\n")

    files = set()
    if opts['all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts['all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(repo.dirstate.walk(scmutil.matchall(repo),
                                        subrepos=list(wctx.substate),
                                        unknown=True, ignored=True))
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(expr)
    if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
        ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write("%s\n" % f)

@command('debugformat',
         [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len('format-variant'), maxvariantlength)

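    # pad each variant name so the repo/config/default value columns line up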
    def makeformatname(name):
        return '%s:' + (' ' * (maxvariantlength - len(name)))

    fm = ui.formatter('debugformat', opts)
    if fm.isplain():
        def formatvalue(value):
            if util.safehasattr(value, 'startswith'):
                return value
            if value:
                return 'yes'
            else:
                return 'no'
    else:
        formatvalue = pycompat.identity

    fm.plain('format-variant')
    fm.plain(' ' * (maxvariantlength - len('format-variant')))
    fm.plain(' repo')
    if ui.verbose:
        fm.plain(' config default')
    fm.plain('\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        if repovalue != configvalue:
            namelabel = 'formatvariant.name.mismatchconfig'
            repolabel = 'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = 'formatvariant.name.mismatchdefault'
            repolabel = 'formatvariant.repo.mismatchdefault'
        else:
            namelabel = 'formatvariant.name.uptodate'
            repolabel = 'formatvariant.repo.uptodate'

        fm.write('name', makeformatname(fv.name), fv.name,
                 label=namelabel)
        fm.write('repo', ' %3s', formatvalue(repovalue),
                 label=repolabel)
        if fv.default != configvalue:
            configlabel = 'formatvariant.config.special'
        else:
            configlabel = 'formatvariant.config.default'
        fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
                     label=configlabel)
        fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
                     label='formatvariant.default')
        fm.plain('\n')
    fm.end()

@command('debugfsinfo', [], _('[PATH]'), norepo=True)
def debugfsinfo(ui, path="."):
    """show information detected about current filesystem"""
    ui.write(('path: %s\n') % path)
    ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
    ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
    ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
    ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
    ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
    casesensitive = '(unknown)'
    try:
        with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
            casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
    except OSError:
        pass
    ui.write(('case-sensitive: %s\n') % casesensitive)

@command('debuggetbundle',
    [('H', 'head', [], _('id of head node'), _('ID')),
     ('C', 'common', [], _('id of common node'), _('ID')),
     ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
    _('REPO FILE [-H|-C ID]...'),
    norepo=True)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('getbundle'):
        raise error.Abort("getbundle() not supported by target repository")
    args = {}
    if common:
        args[r'common'] = [bin(s) for s in common]
    if head:
        args[r'heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args[r'bundlecaps'] = None
    bundle = repo.getbundle('debug', **args)

    bundletype = opts.get('type', 'bzip2').lower()
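    # map the user-facing compression names onto on-disk bundle format types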
    btypes = {'none': 'HG10UN',
              'bzip2': 'HG10BZ',
              'gzip': 'HG10GZ',
              'bundle2': 'HG20'}
    bundletype = btypes.get(bundletype)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_('unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)

@command('debugignore', [], '[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument, display the combined ignore pattern.

    Given space-separated file names, shows whether each given file is ignored
    and, if so, shows the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write("%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
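        # check each requested file, and failing that its parent directories,
        # against the combined ignore matcher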
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != '.':
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    for p in util.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_("%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(_("%s is ignored because of "
                               "containing folder %s\n")
                             % (uipathfn(f), ignored))
                ignorefile, lineno, line = ignoredata
                ui.write(_("(ignore rule in %s, line %d: '%s')\n")
                         % (ignorefile, lineno, line))
            else:
                ui.write(_("%s is not ignored\n") % uipathfn(f))

@command('debugindex', cmdutil.debugrevlogopts + cmdutil.formatteropts,
         _('-c|-m|FILE'))
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, 'debugindex', file_, opts)

    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

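    # size the node id columns from the first revision's (possibly shortened) id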
    idlen = 12
    for i in store:
        idlen = len(shortfn(store.node(i)))
        break

    fm = ui.formatter('debugindex', opts)
    fm.plain(b' rev linkrev %s %s p2\n' % (
        b'nodeid'.ljust(idlen),
        b'p1'.ljust(idlen)))

    for rev in store:
        node = store.node(rev)
        parents = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', '%7d ', store.linkrev(rev))
        fm.write(b'node', '%s ', shortfn(node))
        fm.write(b'p1', '%s ', shortfn(parents[0]))
        fm.write(b'p2', '%s', shortfn(parents[1]))
        fm.plain(b'\n')

    fm.end()

@command('debugindexdot', cmdutil.debugrevlogopts,
         _('-c|-m|FILE'), optionalrepo=True)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openstorage(repo, 'debugindexdot', file_, opts)
    ui.write(("digraph G {\n"))
    for i in r:
        node = r.node(i)
        pp = r.parents(node)
        ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
        if pp[1] != nullid:
            ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
    ui.write("}\n")

@command('debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    repo.changelog.shortest(nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, 'stats'):
        raise error.Abort(_('debugindexstats only works with native code'))
    for k, v in sorted(index.stats().items()):
        ui.write('%s: %d\n' % (k, v))

@command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
def debuginstall(ui, **opts):
    '''test Mercurial installation

    Returns 0 on success.
    '''
    opts = pycompat.byteskwargs(opts)

    problems = 0

    fm = ui.formatter('debuginstall', opts)
    fm.startitem()

    # encoding
    fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(err, 'encodingerror', _(" %s\n"
                 " (check that your locale is properly set)\n"), err)

    # Python
    fm.write('pythonexe', _("checking Python executable (%s)\n"),
             pycompat.sysexecutable)
    fm.write('pythonver', _("checking Python version (%s)\n"),
             ("%d.%d.%d" % sys.version_info[:3]))
    fm.write('pythonlib', _("checking Python lib (%s)...\n"),
             os.path.dirname(pycompat.fsencode(os.__file__)))

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add('sni')

    fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
             fm.formatlist(sorted(security), name='protocol',
                           fmt='%s', sep=','))

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if 'tls1.2' not in security:
        fm.plain(_(' TLS 1.2 not supported by Python install; '
                   'network connections lack modern security\n'))
    if 'sni' not in security:
        fm.plain(_(' SNI not supported by Python install; may have '
                   'connectivity issues with some servers\n'))

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write('hgver', _("checking Mercurial version (%s)\n"),
             hgver.split('+')[0])
    fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
             '+'.join(hgver.split('+')[1:]))

    # compiled modules
    fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
             policy.policy)
    fm.write('hgmodules', _("checking installed modules (%s)...\n"),
             os.path.dirname(pycompat.fsencode(__file__)))

    if policy.policy in ('c', 'allow'):
        err = None
        try:
            from .cext import (
                base85,
                bdiff,
                mpatch,
                osutil,
            )
            dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, 'extensionserror', " %s\n", err)

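    # compression engines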
    compengines = util.compengines._engines.values()
    fm.write('compengines', _('checking registered compression engines (%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines),
                           name='compengine', fmt='%s', sep=', '))
    fm.write('compenginesavail', _('checking available compression engines '
                                   '(%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines
                                  if e.available()),
                           name='compengine', fmt='%s', sep=', '))
    wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
    fm.write('compenginesserver', _('checking available compression engines '
                                    'for wire protocol (%s)\n'),
             fm.formatlist([e.name() for e in wirecompengines
                            if e.wireprotosupport()],
                           name='compengine', fmt='%s', sep=', '))
    re2 = 'missing'
    if util._re2:
        re2 = 'available'
    fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatepaths()
    fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
    fm.condwrite(not p, '', _(" no template directories found\n"))
    if p:
        m = templater.templatepath("map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                p = None
            fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
        else:
            p = None
        fm.condwrite(p, 'defaulttemplate',
                     _("checking default template (%s)\n"), m)
        fm.condwrite(not m, 'defaulttemplatenotfound',
                     _(" template '%s' not found\n"), "default")
    if not p:
        problems += 1
    fm.condwrite(not p, '',
                 _(" (templates seem to have been installed incorrectly)\n"))

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
                 _(" No commit editor set and can't find %s in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor == 'vi' and editorbin)
    fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
                 _(" Can't find editor '%s' in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editorbin)
    if not cmdpath and editor != 'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = stringutil.forcebytestr(e)
        problems += 1

    fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
    fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
                 " (specify a username in your configuration file)\n"), err)

    fm.condwrite(not problems, '',
                 _("no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(problems, 'problems',
                 _("%d problems detected,"
                   " please check your install!\n"), problems)
    fm.end()

    return problems

@command('debugknown', [], _('REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('known'):
        raise error.Abort("known() not supported by target repository")
    flags = repo.known([bin(s) for s in ids])
    ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))

@command('debuglabelcomplete', [], _('LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    debugnamecomplete(ui, repo, *args)

@command('debuglocks',
         [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
          ('W', 'force-wlock', None,
           _('free the working state lock (DANGEROUS)')),
          ('s', 'set-lock', None, _('set the store lock until stopped')),
          ('S', 'set-wlock', None,
           _('set the working state lock until stopped'))],
         _('[OPTION]...'))
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    if opts.get(r'force_lock'):
        repo.svfs.unlink('lock')
    if opts.get(r'force_wlock'):
        repo.vfs.unlink('wlock')
    if opts.get(r'force_lock') or opts.get(r'force_wlock'):
        return 0

    locks = []
    try:
        if opts.get(r'set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_('wlock is already held'))
        if opts.get(r'set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_('lock is already held'))
        if len(locks):
            ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    now = time.time()
    held = 0

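    # try to take each lock; on failure, report who holds it and for how long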
    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if ":" in locker:
                    host, pid = locker.split(':')
                    if host == socket.gethostname():
                        locker = 'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = 'user %s, process %s, host %s' \
                                 % (user or b'None', pid, host)
                ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
                return 1
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise

        ui.write(("%-6s free\n") % (name + ":"))
        return 0

    held += report(repo.svfs, "lock", repo.lock)
    held += report(repo.vfs, "wlock", repo.wlock)

    return held

@command('debugmanifestfulltextcache', [
        ('', 'clear', False, _('clear the cache')),
        ('a', 'add', '', _('add the given manifest node to the cache'),
         _('NODE'))
    ], '')
def debugmanifestfulltextcache(ui, repo, add=None, **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""
    with repo.lock():
        r = repo.manifestlog.getstorage(b'')
        try:
            cache = r._fulltextcache
        except AttributeError:
            ui.warn(_(
                "Current revlog implementation doesn't appear to have a "
                'manifest fulltext cache\n'))
            return

        if opts.get(r'clear'):
            cache.clear()

        if add:
            try:
                manifest = repo.manifestlog[r.lookup(add)]
            except error.LookupError as e:
                raise error.Abort(e, hint="Check your manifest node id")
            manifest.read() # stores revision in cache too

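        # dump the cache contents, most recently used entries first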
        if not len(cache):
            ui.write(_('Cache empty'))
        else:
            ui.write(
                _('Cache contains %d manifest entries, in order of most to '
                  'least recent:\n') % (len(cache),))
            totalsize = 0
            for nodeid in cache:
                # Use cache.get to not update the LRU order
                data = cache.get(nodeid)
                size = len(data)
                totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
                ui.write(_('id: %s, size %s\n') % (
                    hex(nodeid), util.bytecount(size)))
            ondisk = cache._opener.stat('manifestfulltextcache').st_size
            ui.write(
                _('Total cache data size %s, on-disk %s\n') % (
                    util.bytecount(totalsize), util.bytecount(ondisk))
            )

@command('debugmergestate', [], '')
def debugmergestate(ui, repo, *args):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""
    def _hashornull(h):
        if h == nullhex:
            return 'null'
        else:
            return h

    def printrecords(version):
        ui.write(('* version %d records\n') % version)
        if version == 1:
            records = v1records
        else:
            records = v2records

        for rtype, record in records:
            # pretty print some record types
            if rtype == 'L':
                ui.write(('local: %s\n') % record)
            elif rtype == 'O':
                ui.write(('other: %s\n') % record)
            elif rtype == 'm':
                driver, mdstate = record.split('\0', 1)
                ui.write(('merge driver: %s (state "%s")\n')
                         % (driver, mdstate))
            elif rtype in 'FDC':
                r = record.split('\0')
                f, state, hash, lfile, afile, anode, ofile = r[0:7]
                if version == 1:
                    onode = 'not stored in v1 format'
                    flags = r[7]
                else:
                    onode, flags = r[7:9]
                ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
                         % (f, rtype, state, _hashornull(hash)))
                ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
                ui.write((' ancestor path: %s (node %s)\n')
                         % (afile, _hashornull(anode)))
                ui.write((' other path: %s (node %s)\n')
                         % (ofile, _hashornull(onode)))
            elif rtype == 'f':
                filename, rawextras = record.split('\0', 1)
                extras = rawextras.split('\0')
                i = 0
                extrastrings = []
                while i < len(extras):
                    extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
                    i += 2

                ui.write(('file extras: %s (%s)\n')
                         % (filename, ', '.join(extrastrings)))
            elif rtype == 'l':
                labels = record.split('\0', 2)
                labels = [l for l in labels if len(l) > 0]
                ui.write(('labels:\n'))
                ui.write((' local: %s\n' % labels[0]))
                ui.write((' other: %s\n' % labels[1]))
                if len(labels) > 2:
                    ui.write((' base: %s\n' % labels[2]))
            else:
                ui.write(('unrecognized entry: %s\t%s\n')
                         % (rtype, record.replace('\0', '\t')))

    # Avoid mergestate.read() since it may raise an exception for unsupported
    # merge state records. We shouldn't be doing this, but this is OK since this
    # command is pretty low-level.
    ms = mergemod.mergestate(repo)

    # sort so that reasonable information is on top
    v1records = ms._readrecordsv1()
    v2records = ms._readrecordsv2()
    order = 'LOml'
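    # records with a known type sort first, in 'LOml' order; anything else
    # sorts after them, by record payload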
    def key(r):
        idx = order.find(r[0])
        if idx == -1:
            return (1, r[1])
        else:
            return (0, idx)
    v1records.sort(key=key)
    v2records.sort(key=key)

    if not v1records and not v2records:
        ui.write(('no merge state found\n'))
    elif not v2records:
        ui.note(('no version 2 merge state\n'))
        printrecords(1)
    elif ms._v1v2match(v1records, v2records):
        ui.note(('v1 and v2 states match: using v2\n'))
        printrecords(2)
    else:
        ui.note(('v1 and v2 states mismatch: using v1\n'))
        printrecords(1)
        if ui.verbose:
            printrecords(2)

@command('debugnamecomplete', [], _('NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    names = set()
    # since we previously only listed open branches, we will handle that
    # specially (after this for loop)
    for name, ns in repo.names.iteritems():
        if name != 'branches':
            names.update(ns.listnames(repo))
    names.update(tag for (tag, heads, tip, closed)
                 in repo.branchmap().iterbranches() if not closed)
    completions = set()
    if not args:
        args = ['']
    for a in args:
        completions.update(n for n in names if n.startswith(a))
    ui.write('\n'.join(sorted(completions)))
    ui.write('\n')

@command('debugobsolete',
        [('', 'flags', 0, _('markers flag')),
         ('', 'record-parents', False,
          _('record parent information for the precursor')),
         ('r', 'rev', [], _('display markers relevant to REV')),
         ('', 'exclusive', False, _('restrict display to markers only '
                                    'relevant to REV')),
         ('', 'index', False, _('display index of the marker')),
         ('', 'delete', [], _('delete markers specified by indices')),
        ] + cmdutil.commitopts2 + cmdutil.formatteropts,
         _('[OBSOLETED [REPLACEMENT ...]]'))
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.Abort('changeset references must be full hexadecimal '
                              'node identifiers')

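    # --delete mode: drop the markers at the given obsstore indices and return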
1658 if opts.get('delete'):
1658 if opts.get('delete'):
1659 indices = []
1659 indices = []
1660 for v in opts.get('delete'):
1660 for v in opts.get('delete'):
1661 try:
1661 try:
1662 indices.append(int(v))
1662 indices.append(int(v))
1663 except ValueError:
1663 except ValueError:
1664 raise error.Abort(_('invalid index value: %r') % v,
1664 raise error.Abort(_('invalid index value: %r') % v,
1665 hint=_('use integers for indices'))
1665 hint=_('use integers for indices'))
1666
1666
1667 if repo.currenttransaction():
1667 if repo.currenttransaction():
1668 raise error.Abort(_('cannot delete obsmarkers in the middle '
1668 raise error.Abort(_('cannot delete obsmarkers in the middle '
1669 'of transaction.'))
1669 'of transaction.'))
1670
1670
1671 with repo.lock():
1671 with repo.lock():
1672 n = repair.deleteobsmarkers(repo.obsstore, indices)
1672 n = repair.deleteobsmarkers(repo.obsstore, indices)
1673 ui.write(_('deleted %i obsolescence markers\n') % n)
1673 ui.write(_('deleted %i obsolescence markers\n') % n)
1674
1674
1675 return
1675 return
1676
1676
1677 if precursor is not None:
1677 if precursor is not None:
1678 if opts['rev']:
1678 if opts['rev']:
1679 raise error.Abort('cannot select revision when creating marker')
1679 raise error.Abort('cannot select revision when creating marker')
1680 metadata = {}
1680 metadata = {}
1681 metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
1681 metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
1682 succs = tuple(parsenodeid(succ) for succ in successors)
1682 succs = tuple(parsenodeid(succ) for succ in successors)
1683 l = repo.lock()
1683 l = repo.lock()
1684 try:
1684 try:
1685 tr = repo.transaction('debugobsolete')
1685 tr = repo.transaction('debugobsolete')
1686 try:
1686 try:
1687 date = opts.get('date')
1687 date = opts.get('date')
1688 if date:
1688 if date:
1689 date = dateutil.parsedate(date)
1689 date = dateutil.parsedate(date)
1690 else:
1690 else:
1691 date = None
1691 date = None
1692 prec = parsenodeid(precursor)
1692 prec = parsenodeid(precursor)
1693 parents = None
1693 parents = None
1694 if opts['record_parents']:
1694 if opts['record_parents']:
1695 if prec not in repo.unfiltered():
1695 if prec not in repo.unfiltered():
1696 raise error.Abort('cannot used --record-parents on '
1696 raise error.Abort('cannot used --record-parents on '
1697 'unknown changesets')
1697 'unknown changesets')
1698 parents = repo.unfiltered()[prec].parents()
1698 parents = repo.unfiltered()[prec].parents()
1699 parents = tuple(p.node() for p in parents)
1699 parents = tuple(p.node() for p in parents)
1700 repo.obsstore.create(tr, prec, succs, opts['flags'],
1700 repo.obsstore.create(tr, prec, succs, opts['flags'],
1701 parents=parents, date=date,
1701 parents=parents, date=date,
1702 metadata=metadata, ui=ui)
1702 metadata=metadata, ui=ui)
1703 tr.close()
1703 tr.close()
1704 except ValueError as exc:
1704 except ValueError as exc:
1705 raise error.Abort(_('bad obsmarker input: %s') %
1705 raise error.Abort(_('bad obsmarker input: %s') %
1706 pycompat.bytestr(exc))
1706 pycompat.bytestr(exc))
1707 finally:
1707 finally:
1708 tr.release()
1708 tr.release()
1709 finally:
1709 finally:
1710 l.release()
1710 l.release()
1711 else:
1711 else:
1712 if opts['rev']:
1712 if opts['rev']:
1713 revs = scmutil.revrange(repo, opts['rev'])
1713 revs = scmutil.revrange(repo, opts['rev'])
1714 nodes = [repo[r].node() for r in revs]
1714 nodes = [repo[r].node() for r in revs]
1715 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1715 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1716 exclusive=opts['exclusive']))
1716 exclusive=opts['exclusive']))
1717 markers.sort(key=lambda x: x._data)
1717 markers.sort(key=lambda x: x._data)
1718 else:
1718 else:
1719 markers = obsutil.getmarkers(repo)
1719 markers = obsutil.getmarkers(repo)
1720
1720
1721 markerstoiter = markers
1721 markerstoiter = markers
1722 isrelevant = lambda m: True
1722 isrelevant = lambda m: True
1723 if opts.get('rev') and opts.get('index'):
1723 if opts.get('rev') and opts.get('index'):
1724 markerstoiter = obsutil.getmarkers(repo)
1724 markerstoiter = obsutil.getmarkers(repo)
1725 markerset = set(markers)
1725 markerset = set(markers)
1726 isrelevant = lambda m: m in markerset
1726 isrelevant = lambda m: m in markerset
1727
1727
1728 fm = ui.formatter('debugobsolete', opts)
1728 fm = ui.formatter('debugobsolete', opts)
1729 for i, m in enumerate(markerstoiter):
1729 for i, m in enumerate(markerstoiter):
1730 if not isrelevant(m):
1730 if not isrelevant(m):
1731 # marker can be irrelevant when we're iterating over a set
1731 # marker can be irrelevant when we're iterating over a set
1732 # of markers (markerstoiter) which is bigger than the set
1732 # of markers (markerstoiter) which is bigger than the set
1733 # of markers we want to display (markers)
1733 # of markers we want to display (markers)
1734 # this can happen if both --index and --rev options are
1734 # this can happen if both --index and --rev options are
1735 # provided and thus we need to iterate over all of the markers
1735 # provided and thus we need to iterate over all of the markers
1736 # to get the correct indices, but only display the ones that
1736 # to get the correct indices, but only display the ones that
1737                 # are relevant to the --rev value.
1737                 # are relevant to the --rev value.
1738 continue
1738 continue
1739 fm.startitem()
1739 fm.startitem()
1740 ind = i if opts.get('index') else None
1740 ind = i if opts.get('index') else None
1741 cmdutil.showmarker(fm, m, index=ind)
1741 cmdutil.showmarker(fm, m, index=ind)
1742 fm.end()
1742 fm.end()
1743
1743
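# A hedged usage sketch for the debugobsolete command above; the node ids
# and revision below are placeholders, and the behaviour is paraphrased
# from the option handling in the function body.
#
#   $ hg debugobsolete                    # list every marker in the store
#   $ hg debugobsolete --rev . --index    # only markers relevant to '.',
#                                         # each prefixed with its index
#   $ hg debugobsolete PRECNODE SUCCNODE  # record PRECNODE as obsoleted
#                                         # by SUCCNODE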
1744 @command('debugpathcomplete',
1744 @command('debugpathcomplete',
1745 [('f', 'full', None, _('complete an entire path')),
1745 [('f', 'full', None, _('complete an entire path')),
1746 ('n', 'normal', None, _('show only normal files')),
1746 ('n', 'normal', None, _('show only normal files')),
1747 ('a', 'added', None, _('show only added files')),
1747 ('a', 'added', None, _('show only added files')),
1748 ('r', 'removed', None, _('show only removed files'))],
1748 ('r', 'removed', None, _('show only removed files'))],
1749 _('FILESPEC...'))
1749 _('FILESPEC...'))
1750 def debugpathcomplete(ui, repo, *specs, **opts):
1750 def debugpathcomplete(ui, repo, *specs, **opts):
1751 '''complete part or all of a tracked path
1751 '''complete part or all of a tracked path
1752
1752
1753 This command supports shells that offer path name completion. It
1753 This command supports shells that offer path name completion. It
1754 currently completes only files already known to the dirstate.
1754 currently completes only files already known to the dirstate.
1755
1755
1756 Completion extends only to the next path segment unless
1756 Completion extends only to the next path segment unless
1757 --full is specified, in which case entire paths are used.'''
1757 --full is specified, in which case entire paths are used.'''
1758
1758
1759 def complete(path, acceptable):
1759 def complete(path, acceptable):
1760 dirstate = repo.dirstate
1760 dirstate = repo.dirstate
1761 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
1761 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
1762 rootdir = repo.root + pycompat.ossep
1762 rootdir = repo.root + pycompat.ossep
1763 if spec != repo.root and not spec.startswith(rootdir):
1763 if spec != repo.root and not spec.startswith(rootdir):
1764 return [], []
1764 return [], []
1765 if os.path.isdir(spec):
1765 if os.path.isdir(spec):
1766 spec += '/'
1766 spec += '/'
1767 spec = spec[len(rootdir):]
1767 spec = spec[len(rootdir):]
1768 fixpaths = pycompat.ossep != '/'
1768 fixpaths = pycompat.ossep != '/'
1769 if fixpaths:
1769 if fixpaths:
1770 spec = spec.replace(pycompat.ossep, '/')
1770 spec = spec.replace(pycompat.ossep, '/')
1771 speclen = len(spec)
1771 speclen = len(spec)
1772 fullpaths = opts[r'full']
1772 fullpaths = opts[r'full']
1773 files, dirs = set(), set()
1773 files, dirs = set(), set()
1774 adddir, addfile = dirs.add, files.add
1774 adddir, addfile = dirs.add, files.add
1775 for f, st in dirstate.iteritems():
1775 for f, st in dirstate.iteritems():
1776 if f.startswith(spec) and st[0] in acceptable:
1776 if f.startswith(spec) and st[0] in acceptable:
1777 if fixpaths:
1777 if fixpaths:
1778 f = f.replace('/', pycompat.ossep)
1778 f = f.replace('/', pycompat.ossep)
1779 if fullpaths:
1779 if fullpaths:
1780 addfile(f)
1780 addfile(f)
1781 continue
1781 continue
1782 s = f.find(pycompat.ossep, speclen)
1782 s = f.find(pycompat.ossep, speclen)
1783 if s >= 0:
1783 if s >= 0:
1784 adddir(f[:s])
1784 adddir(f[:s])
1785 else:
1785 else:
1786 addfile(f)
1786 addfile(f)
1787 return files, dirs
1787 return files, dirs
1788
1788
1789 acceptable = ''
1789 acceptable = ''
1790 if opts[r'normal']:
1790 if opts[r'normal']:
1791 acceptable += 'nm'
1791 acceptable += 'nm'
1792 if opts[r'added']:
1792 if opts[r'added']:
1793 acceptable += 'a'
1793 acceptable += 'a'
1794 if opts[r'removed']:
1794 if opts[r'removed']:
1795 acceptable += 'r'
1795 acceptable += 'r'
1796 cwd = repo.getcwd()
1796 cwd = repo.getcwd()
1797 if not specs:
1797 if not specs:
1798 specs = ['.']
1798 specs = ['.']
1799
1799
1800 files, dirs = set(), set()
1800 files, dirs = set(), set()
1801 for spec in specs:
1801 for spec in specs:
1802 f, d = complete(spec, acceptable or 'nmar')
1802 f, d = complete(spec, acceptable or 'nmar')
1803 files.update(f)
1803 files.update(f)
1804 dirs.update(d)
1804 dirs.update(d)
1805 files.update(dirs)
1805 files.update(dirs)
1806 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1806 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1807 ui.write('\n')
1807 ui.write('\n')
1808
1808
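# A hedged usage sketch for debugpathcomplete; the "src/" prefix is purely
# illustrative. A shell completion helper might run:
#
#   $ hg debugpathcomplete src/           # complete up to the next segment
#   $ hg debugpathcomplete --full src/    # emit entire tracked paths
#   $ hg debugpathcomplete --added src/   # restrict output to added files
#
# Output is one path per line, relative to the current directory, matching
# the ui.write() calls above.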
1809 @command('debugpathcopies',
1809 @command('debugpathcopies',
1810 cmdutil.walkopts,
1810 cmdutil.walkopts,
1811          'hg debugpathcopies REV1 REV2 [FILE]',
1811          'hg debugpathcopies REV1 REV2 [FILE]',
1812 inferrepo=True)
1812 inferrepo=True)
1813 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
1813 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
1814 """show copies between two revisions"""
1814 """show copies between two revisions"""
1815 ctx1 = scmutil.revsingle(repo, rev1)
1815 ctx1 = scmutil.revsingle(repo, rev1)
1816 ctx2 = scmutil.revsingle(repo, rev2)
1816 ctx2 = scmutil.revsingle(repo, rev2)
1817 m = scmutil.match(ctx1, pats, opts)
1817 m = scmutil.match(ctx1, pats, opts)
1818 for dst, src in copies.pathcopies(ctx1, ctx2, m).items():
1818 for dst, src in copies.pathcopies(ctx1, ctx2, m).items():
1819 ui.write('%s -> %s\n' % (src, dst))
1819 ui.write('%s -> %s\n' % (src, dst))
1820
1820
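# A hedged usage sketch for debugpathcopies; the revisions and file names
# are placeholders:
#
#   $ hg debugpathcopies REV1 REV2
#   old/name.c -> new/name.c
#
# Each "src -> dst" line mirrors the ui.write() format above; optional
# FILE patterns limit which copies are reported.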
1821 @command('debugpeer', [], _('PATH'), norepo=True)
1821 @command('debugpeer', [], _('PATH'), norepo=True)
1822 def debugpeer(ui, path):
1822 def debugpeer(ui, path):
1823 """establish a connection to a peer repository"""
1823 """establish a connection to a peer repository"""
1824 # Always enable peer request logging. Requires --debug to display
1824 # Always enable peer request logging. Requires --debug to display
1825 # though.
1825 # though.
1826 overrides = {
1826 overrides = {
1827 ('devel', 'debug.peer-request'): True,
1827 ('devel', 'debug.peer-request'): True,
1828 }
1828 }
1829
1829
1830 with ui.configoverride(overrides):
1830 with ui.configoverride(overrides):
1831 peer = hg.peer(ui, {}, path)
1831 peer = hg.peer(ui, {}, path)
1832
1832
1833 local = peer.local() is not None
1833 local = peer.local() is not None
1834 canpush = peer.canpush()
1834 canpush = peer.canpush()
1835
1835
1836 ui.write(_('url: %s\n') % peer.url())
1836 ui.write(_('url: %s\n') % peer.url())
1837 ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
1837 ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
1838 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
1838 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
1839
1839
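# A hedged usage sketch for debugpeer; the URL is a placeholder:
#
#   $ hg debugpeer ssh://user@example.com/repo
#
# This prints url:, local:, and pushable: lines matching the three
# ui.write() calls above; add --debug to also see the peer-request log
# enabled by the configuration override.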
1840 @command('debugpickmergetool',
1840 @command('debugpickmergetool',
1841 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1841 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1842 ('', 'changedelete', None, _('emulate merging change and delete')),
1842 ('', 'changedelete', None, _('emulate merging change and delete')),
1843 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1843 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1844 _('[PATTERN]...'),
1844 _('[PATTERN]...'),
1845 inferrepo=True)
1845 inferrepo=True)
1846 def debugpickmergetool(ui, repo, *pats, **opts):
1846 def debugpickmergetool(ui, repo, *pats, **opts):
1847     """examine which merge tool is chosen for the specified file
1847     """examine which merge tool is chosen for the specified file
1848
1848
1849 As described in :hg:`help merge-tools`, Mercurial examines
1849 As described in :hg:`help merge-tools`, Mercurial examines
1850 configurations below in this order to decide which merge tool is
1850 configurations below in this order to decide which merge tool is
1851     chosen for the specified file.
1851     chosen for the specified file.
1852
1852
1853 1. ``--tool`` option
1853 1. ``--tool`` option
1854 2. ``HGMERGE`` environment variable
1854 2. ``HGMERGE`` environment variable
1855 3. configurations in ``merge-patterns`` section
1855 3. configurations in ``merge-patterns`` section
1856 4. configuration of ``ui.merge``
1856 4. configuration of ``ui.merge``
1857 5. configurations in ``merge-tools`` section
1857 5. configurations in ``merge-tools`` section
1858     6. ``hgmerge`` tool (for historical reasons only)
1858     6. ``hgmerge`` tool (for historical reasons only)
1859 7. default tool for fallback (``:merge`` or ``:prompt``)
1859 7. default tool for fallback (``:merge`` or ``:prompt``)
1860
1860
1861     This command writes out the examination result in the style below::
1861     This command writes out the examination result in the style below::
1862
1862
1863 FILE = MERGETOOL
1863 FILE = MERGETOOL
1864
1864
1865 By default, all files known in the first parent context of the
1865 By default, all files known in the first parent context of the
1866 working directory are examined. Use file patterns and/or -I/-X
1866 working directory are examined. Use file patterns and/or -I/-X
1867 options to limit target files. -r/--rev is also useful to examine
1867 options to limit target files. -r/--rev is also useful to examine
1868     files in another context without actually updating to it.
1868     files in another context without actually updating to it.
1869
1869
1870 With --debug, this command shows warning messages while matching
1870 With --debug, this command shows warning messages while matching
1871 against ``merge-patterns`` and so on, too. It is recommended to
1871 against ``merge-patterns`` and so on, too. It is recommended to
1872 use this option with explicit file patterns and/or -I/-X options,
1872 use this option with explicit file patterns and/or -I/-X options,
1873     because this option increases the amount of output per file according
1873     because this option increases the amount of output per file according
1874 to configurations in hgrc.
1874 to configurations in hgrc.
1875
1875
1876     With -v/--verbose, this command first shows the configurations
1876     With -v/--verbose, this command first shows the configurations
1877     below (only those that are specified).
1877     below (only those that are specified).
1878
1878
1879 - ``--tool`` option
1879 - ``--tool`` option
1880 - ``HGMERGE`` environment variable
1880 - ``HGMERGE`` environment variable
1881 - configuration of ``ui.merge``
1881 - configuration of ``ui.merge``
1882
1882
1883     If a merge tool is chosen before matching against
1883     If a merge tool is chosen before matching against
1884 ``merge-patterns``, this command can't show any helpful
1884 ``merge-patterns``, this command can't show any helpful
1885     information, even with --debug. In such a case, the information
1885     information, even with --debug. In such a case, the information
1886     above is useful for knowing why a merge tool is chosen.
1886     above is useful for knowing why a merge tool is chosen.
1887 """
1887 """
1888 opts = pycompat.byteskwargs(opts)
1888 opts = pycompat.byteskwargs(opts)
1889 overrides = {}
1889 overrides = {}
1890 if opts['tool']:
1890 if opts['tool']:
1891 overrides[('ui', 'forcemerge')] = opts['tool']
1891 overrides[('ui', 'forcemerge')] = opts['tool']
1892 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1892 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1893
1893
1894 with ui.configoverride(overrides, 'debugmergepatterns'):
1894 with ui.configoverride(overrides, 'debugmergepatterns'):
1895 hgmerge = encoding.environ.get("HGMERGE")
1895 hgmerge = encoding.environ.get("HGMERGE")
1896 if hgmerge is not None:
1896 if hgmerge is not None:
1897 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1897 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1898 uimerge = ui.config("ui", "merge")
1898 uimerge = ui.config("ui", "merge")
1899 if uimerge:
1899 if uimerge:
1900 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1900 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1901
1901
1902 ctx = scmutil.revsingle(repo, opts.get('rev'))
1902 ctx = scmutil.revsingle(repo, opts.get('rev'))
1903 m = scmutil.match(ctx, pats, opts)
1903 m = scmutil.match(ctx, pats, opts)
1904 changedelete = opts['changedelete']
1904 changedelete = opts['changedelete']
1905 for path in ctx.walk(m):
1905 for path in ctx.walk(m):
1906 fctx = ctx[path]
1906 fctx = ctx[path]
1907 try:
1907 try:
1908 if not ui.debugflag:
1908 if not ui.debugflag:
1909 ui.pushbuffer(error=True)
1909 ui.pushbuffer(error=True)
1910 tool, toolpath = filemerge._picktool(repo, ui, path,
1910 tool, toolpath = filemerge._picktool(repo, ui, path,
1911 fctx.isbinary(),
1911 fctx.isbinary(),
1912 'l' in fctx.flags(),
1912 'l' in fctx.flags(),
1913 changedelete)
1913 changedelete)
1914 finally:
1914 finally:
1915 if not ui.debugflag:
1915 if not ui.debugflag:
1916 ui.popbuffer()
1916 ui.popbuffer()
1917 ui.write(('%s = %s\n') % (path, tool))
1917 ui.write(('%s = %s\n') % (path, tool))
1918
1918
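# A hedged usage sketch for debugpickmergetool; the revision and pattern
# are placeholders:
#
#   $ hg debugpickmergetool -r REV 'glob:**.c'
#
# Each output line has the "FILE = MERGETOOL" form documented above; add
# -v to first print whichever of --tool, HGMERGE, or ui.merge is in
# effect, or --debug for the merge-patterns matching details.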
1919 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1919 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1920 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1920 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1921 '''access the pushkey key/value protocol
1921 '''access the pushkey key/value protocol
1922
1922
1923 With two args, list the keys in the given namespace.
1923 With two args, list the keys in the given namespace.
1924
1924
1925 With five args, set a key to new if it currently is set to old.
1925 With five args, set a key to new if it currently is set to old.
1926 Reports success or failure.
1926 Reports success or failure.
1927 '''
1927 '''
1928
1928
1929 target = hg.peer(ui, {}, repopath)
1929 target = hg.peer(ui, {}, repopath)
1930 if keyinfo:
1930 if keyinfo:
1931 key, old, new = keyinfo
1931 key, old, new = keyinfo
1932 with target.commandexecutor() as e:
1932 with target.commandexecutor() as e:
1933 r = e.callcommand('pushkey', {
1933 r = e.callcommand('pushkey', {
1934 'namespace': namespace,
1934 'namespace': namespace,
1935 'key': key,
1935 'key': key,
1936 'old': old,
1936 'old': old,
1937 'new': new,
1937 'new': new,
1938 }).result()
1938 }).result()
1939
1939
1940 ui.status(pycompat.bytestr(r) + '\n')
1940 ui.status(pycompat.bytestr(r) + '\n')
1941 return not r
1941 return not r
1942 else:
1942 else:
1943 for k, v in sorted(target.listkeys(namespace).iteritems()):
1943 for k, v in sorted(target.listkeys(namespace).iteritems()):
1944 ui.write("%s\t%s\n" % (stringutil.escapestr(k),
1944 ui.write("%s\t%s\n" % (stringutil.escapestr(k),
1945 stringutil.escapestr(v)))
1945 stringutil.escapestr(v)))
1946
1946
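# A hedged usage sketch for debugpushkey; the repository path and values
# are placeholders, and "bookmarks" is assumed to be an available pushkey
# namespace:
#
#   $ hg debugpushkey /path/to/repo bookmarks            # list key/value pairs
#   $ hg debugpushkey /path/to/repo bookmarks BOOK OLD NEW
#
# The two-argument form prints tab-separated "key<TAB>value" lines; the
# five-argument form updates BOOK only if its current value is OLD and
# reports success or failure, as described in the docstring above.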
1947 @command('debugpvec', [], _('A B'))
1947 @command('debugpvec', [], _('A B'))
1948 def debugpvec(ui, repo, a, b=None):
1948 def debugpvec(ui, repo, a, b=None):
1949 ca = scmutil.revsingle(repo, a)
1949 ca = scmutil.revsingle(repo, a)
1950 cb = scmutil.revsingle(repo, b)
1950 cb = scmutil.revsingle(repo, b)
1951 pa = pvec.ctxpvec(ca)
1951 pa = pvec.ctxpvec(ca)
1952 pb = pvec.ctxpvec(cb)
1952 pb = pvec.ctxpvec(cb)
1953 if pa == pb:
1953 if pa == pb:
1954 rel = "="
1954 rel = "="
1955 elif pa > pb:
1955 elif pa > pb:
1956 rel = ">"
1956 rel = ">"
1957 elif pa < pb:
1957 elif pa < pb:
1958 rel = "<"
1958 rel = "<"
1959 elif pa | pb:
1959 elif pa | pb:
1960 rel = "|"
1960 rel = "|"
1961 ui.write(_("a: %s\n") % pa)
1961 ui.write(_("a: %s\n") % pa)
1962 ui.write(_("b: %s\n") % pb)
1962 ui.write(_("b: %s\n") % pb)
1963 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1963 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1964 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1964 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1965 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1965 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1966 pa.distance(pb), rel))
1966 pa.distance(pb), rel))
1967
1967
1968 @command('debugrebuilddirstate|debugrebuildstate',
1968 @command('debugrebuilddirstate|debugrebuildstate',
1969 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1969 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1970 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1970 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1971 'the working copy parent')),
1971 'the working copy parent')),
1972 ],
1972 ],
1973 _('[-r REV]'))
1973 _('[-r REV]'))
1974 def debugrebuilddirstate(ui, repo, rev, **opts):
1974 def debugrebuilddirstate(ui, repo, rev, **opts):
1975     """rebuild the dirstate as it would look for the given revision
1975     """rebuild the dirstate as it would look for the given revision
1976
1976
1977     If no revision is specified, the first current parent will be used.
1977     If no revision is specified, the first current parent will be used.
1978
1978
1979 The dirstate will be set to the files of the given revision.
1979 The dirstate will be set to the files of the given revision.
1980 The actual working directory content or existing dirstate
1980 The actual working directory content or existing dirstate
1981 information such as adds or removes is not considered.
1981 information such as adds or removes is not considered.
1982
1982
1983 ``minimal`` will only rebuild the dirstate status for files that claim to be
1983 ``minimal`` will only rebuild the dirstate status for files that claim to be
1984 tracked but are not in the parent manifest, or that exist in the parent
1984 tracked but are not in the parent manifest, or that exist in the parent
1985 manifest but are not in the dirstate. It will not change adds, removes, or
1985 manifest but are not in the dirstate. It will not change adds, removes, or
1986 modified files that are in the working copy parent.
1986 modified files that are in the working copy parent.
1987
1987
1988 One use of this command is to make the next :hg:`status` invocation
1988 One use of this command is to make the next :hg:`status` invocation
1989 check the actual file content.
1989 check the actual file content.
1990 """
1990 """
1991 ctx = scmutil.revsingle(repo, rev)
1991 ctx = scmutil.revsingle(repo, rev)
1992 with repo.wlock():
1992 with repo.wlock():
1993 dirstate = repo.dirstate
1993 dirstate = repo.dirstate
1994 changedfiles = None
1994 changedfiles = None
1995 # See command doc for what minimal does.
1995 # See command doc for what minimal does.
1996 if opts.get(r'minimal'):
1996 if opts.get(r'minimal'):
1997 manifestfiles = set(ctx.manifest().keys())
1997 manifestfiles = set(ctx.manifest().keys())
1998 dirstatefiles = set(dirstate)
1998 dirstatefiles = set(dirstate)
1999 manifestonly = manifestfiles - dirstatefiles
1999 manifestonly = manifestfiles - dirstatefiles
2000 dsonly = dirstatefiles - manifestfiles
2000 dsonly = dirstatefiles - manifestfiles
2001 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
2001 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
2002 changedfiles = manifestonly | dsnotadded
2002 changedfiles = manifestonly | dsnotadded
2003
2003
2004 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2004 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2005
2005
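# A hedged usage sketch for debugrebuilddirstate; the revision is a
# placeholder:
#
#   $ hg debugrebuilddirstate -r REV       # rebuild the dirstate from REV
#   $ hg debugrebuilddirstate --minimal    # only repair entries that are
#                                          # inconsistent with the parent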
2006 @command('debugrebuildfncache', [], '')
2006 @command('debugrebuildfncache', [], '')
2007 def debugrebuildfncache(ui, repo):
2007 def debugrebuildfncache(ui, repo):
2008 """rebuild the fncache file"""
2008 """rebuild the fncache file"""
2009 repair.rebuildfncache(ui, repo)
2009 repair.rebuildfncache(ui, repo)
2010
2010
2011 @command('debugrename',
2011 @command('debugrename',
2012 [('r', 'rev', '', _('revision to debug'), _('REV'))],
2012 [('r', 'rev', '', _('revision to debug'), _('REV'))],
2013 _('[-r REV] FILE'))
2013 _('[-r REV] FILE'))
2014 def debugrename(ui, repo, file1, *pats, **opts):
2014 def debugrename(ui, repo, file1, *pats, **opts):
2015 """dump rename information"""
2015 """dump rename information"""
2016
2016
2017 opts = pycompat.byteskwargs(opts)
2017 opts = pycompat.byteskwargs(opts)
2018 ctx = scmutil.revsingle(repo, opts.get('rev'))
2018 ctx = scmutil.revsingle(repo, opts.get('rev'))
2019 m = scmutil.match(ctx, (file1,) + pats, opts)
2019 m = scmutil.match(ctx, (file1,) + pats, opts)
2020 for abs in ctx.walk(m):
2020 for abs in ctx.walk(m):
2021 fctx = ctx[abs]
2021 fctx = ctx[abs]
2022 o = fctx.filelog().renamed(fctx.filenode())
2022 o = fctx.filelog().renamed(fctx.filenode())
2023 rel = repo.pathto(abs)
2023 rel = repo.pathto(abs)
2024 if o:
2024 if o:
2025 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2025 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2026 else:
2026 else:
2027 ui.write(_("%s not renamed\n") % rel)
2027 ui.write(_("%s not renamed\n") % rel)
2028
2028
2029 @command('debugrevlog', cmdutil.debugrevlogopts +
2029 @command('debugrevlog', cmdutil.debugrevlogopts +
2030 [('d', 'dump', False, _('dump index data'))],
2030 [('d', 'dump', False, _('dump index data'))],
2031 _('-c|-m|FILE'),
2031 _('-c|-m|FILE'),
2032 optionalrepo=True)
2032 optionalrepo=True)
2033 def debugrevlog(ui, repo, file_=None, **opts):
2033 def debugrevlog(ui, repo, file_=None, **opts):
2034 """show data and statistics about a revlog"""
2034 """show data and statistics about a revlog"""
2035 opts = pycompat.byteskwargs(opts)
2035 opts = pycompat.byteskwargs(opts)
2036 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
2036 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
2037
2037
2038 if opts.get("dump"):
2038 if opts.get("dump"):
2039 numrevs = len(r)
2039 numrevs = len(r)
2040 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
2040 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
2041 " rawsize totalsize compression heads chainlen\n"))
2041 " rawsize totalsize compression heads chainlen\n"))
2042 ts = 0
2042 ts = 0
2043 heads = set()
2043 heads = set()
2044
2044
2045 for rev in pycompat.xrange(numrevs):
2045 for rev in pycompat.xrange(numrevs):
2046 dbase = r.deltaparent(rev)
2046 dbase = r.deltaparent(rev)
2047 if dbase == -1:
2047 if dbase == -1:
2048 dbase = rev
2048 dbase = rev
2049 cbase = r.chainbase(rev)
2049 cbase = r.chainbase(rev)
2050 clen = r.chainlen(rev)
2050 clen = r.chainlen(rev)
2051 p1, p2 = r.parentrevs(rev)
2051 p1, p2 = r.parentrevs(rev)
2052 rs = r.rawsize(rev)
2052 rs = r.rawsize(rev)
2053 ts = ts + rs
2053 ts = ts + rs
2054 heads -= set(r.parentrevs(rev))
2054 heads -= set(r.parentrevs(rev))
2055 heads.add(rev)
2055 heads.add(rev)
2056 try:
2056 try:
2057 compression = ts / r.end(rev)
2057 compression = ts / r.end(rev)
2058 except ZeroDivisionError:
2058 except ZeroDivisionError:
2059 compression = 0
2059 compression = 0
2060 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2060 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2061 "%11d %5d %8d\n" %
2061 "%11d %5d %8d\n" %
2062 (rev, p1, p2, r.start(rev), r.end(rev),
2062 (rev, p1, p2, r.start(rev), r.end(rev),
2063 r.start(dbase), r.start(cbase),
2063 r.start(dbase), r.start(cbase),
2064 r.start(p1), r.start(p2),
2064 r.start(p1), r.start(p2),
2065 rs, ts, compression, len(heads), clen))
2065 rs, ts, compression, len(heads), clen))
2066 return 0
2066 return 0
2067
2067
2068 v = r.version
2068 v = r.version
2069 format = v & 0xFFFF
2069 format = v & 0xFFFF
2070 flags = []
2070 flags = []
2071 gdelta = False
2071 gdelta = False
2072 if v & revlog.FLAG_INLINE_DATA:
2072 if v & revlog.FLAG_INLINE_DATA:
2073 flags.append('inline')
2073 flags.append('inline')
2074 if v & revlog.FLAG_GENERALDELTA:
2074 if v & revlog.FLAG_GENERALDELTA:
2075 gdelta = True
2075 gdelta = True
2076 flags.append('generaldelta')
2076 flags.append('generaldelta')
2077 if not flags:
2077 if not flags:
2078 flags = ['(none)']
2078 flags = ['(none)']
2079
2079
2080 ### tracks merge vs single parent
2080 ### tracks merge vs single parent
2081 nummerges = 0
2081 nummerges = 0
2082
2082
2083     ### tracks the ways the "delta" is built
2083     ### tracks the ways the "delta" is built
2084 # nodelta
2084 # nodelta
2085 numempty = 0
2085 numempty = 0
2086 numemptytext = 0
2086 numemptytext = 0
2087 numemptydelta = 0
2087 numemptydelta = 0
2088 # full file content
2088 # full file content
2089 numfull = 0
2089 numfull = 0
2090 # intermediate snapshot against a prior snapshot
2090 # intermediate snapshot against a prior snapshot
2091 numsemi = 0
2091 numsemi = 0
2092 # snapshot count per depth
2092 # snapshot count per depth
2093 numsnapdepth = collections.defaultdict(lambda: 0)
2093 numsnapdepth = collections.defaultdict(lambda: 0)
2094 # delta against previous revision
2094 # delta against previous revision
2095 numprev = 0
2095 numprev = 0
2096 # delta against first or second parent (not prev)
2096 # delta against first or second parent (not prev)
2097 nump1 = 0
2097 nump1 = 0
2098 nump2 = 0
2098 nump2 = 0
2099 # delta against neither prev nor parents
2099 # delta against neither prev nor parents
2100 numother = 0
2100 numother = 0
2101     # deltas against prev that are also first or second parent
2101     # deltas against prev that are also first or second parent
2102 # (details of `numprev`)
2102 # (details of `numprev`)
2103 nump1prev = 0
2103 nump1prev = 0
2104 nump2prev = 0
2104 nump2prev = 0
2105
2105
2106     # data about the delta chain of each rev
2106     # data about the delta chain of each rev
2107 chainlengths = []
2107 chainlengths = []
2108 chainbases = []
2108 chainbases = []
2109 chainspans = []
2109 chainspans = []
2110
2110
2111 # data about each revision
2111 # data about each revision
2112 datasize = [None, 0, 0]
2112 datasize = [None, 0, 0]
2113 fullsize = [None, 0, 0]
2113 fullsize = [None, 0, 0]
2114 semisize = [None, 0, 0]
2114 semisize = [None, 0, 0]
2115 # snapshot count per depth
2115 # snapshot count per depth
2116 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2116 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2117 deltasize = [None, 0, 0]
2117 deltasize = [None, 0, 0]
2118 chunktypecounts = {}
2118 chunktypecounts = {}
2119 chunktypesizes = {}
2119 chunktypesizes = {}
2120
2120
2121 def addsize(size, l):
2121 def addsize(size, l):
2122 if l[0] is None or size < l[0]:
2122 if l[0] is None or size < l[0]:
2123 l[0] = size
2123 l[0] = size
2124 if size > l[1]:
2124 if size > l[1]:
2125 l[1] = size
2125 l[1] = size
2126 l[2] += size
2126 l[2] += size
2127
2127
2128 numrevs = len(r)
2128 numrevs = len(r)
2129 for rev in pycompat.xrange(numrevs):
2129 for rev in pycompat.xrange(numrevs):
2130 p1, p2 = r.parentrevs(rev)
2130 p1, p2 = r.parentrevs(rev)
2131 delta = r.deltaparent(rev)
2131 delta = r.deltaparent(rev)
2132 if format > 0:
2132 if format > 0:
2133 addsize(r.rawsize(rev), datasize)
2133 addsize(r.rawsize(rev), datasize)
2134 if p2 != nullrev:
2134 if p2 != nullrev:
2135 nummerges += 1
2135 nummerges += 1
2136 size = r.length(rev)
2136 size = r.length(rev)
2137 if delta == nullrev:
2137 if delta == nullrev:
2138 chainlengths.append(0)
2138 chainlengths.append(0)
2139 chainbases.append(r.start(rev))
2139 chainbases.append(r.start(rev))
2140 chainspans.append(size)
2140 chainspans.append(size)
2141 if size == 0:
2141 if size == 0:
2142 numempty += 1
2142 numempty += 1
2143 numemptytext += 1
2143 numemptytext += 1
2144 else:
2144 else:
2145 numfull += 1
2145 numfull += 1
2146 numsnapdepth[0] += 1
2146 numsnapdepth[0] += 1
2147 addsize(size, fullsize)
2147 addsize(size, fullsize)
2148 addsize(size, snapsizedepth[0])
2148 addsize(size, snapsizedepth[0])
2149 else:
2149 else:
2150 chainlengths.append(chainlengths[delta] + 1)
2150 chainlengths.append(chainlengths[delta] + 1)
2151 baseaddr = chainbases[delta]
2151 baseaddr = chainbases[delta]
2152 revaddr = r.start(rev)
2152 revaddr = r.start(rev)
2153 chainbases.append(baseaddr)
2153 chainbases.append(baseaddr)
2154 chainspans.append((revaddr - baseaddr) + size)
2154 chainspans.append((revaddr - baseaddr) + size)
2155 if size == 0:
2155 if size == 0:
2156 numempty += 1
2156 numempty += 1
2157 numemptydelta += 1
2157 numemptydelta += 1
2158 elif r.issnapshot(rev):
2158 elif r.issnapshot(rev):
2159 addsize(size, semisize)
2159 addsize(size, semisize)
2160 numsemi += 1
2160 numsemi += 1
2161 depth = r.snapshotdepth(rev)
2161 depth = r.snapshotdepth(rev)
2162 numsnapdepth[depth] += 1
2162 numsnapdepth[depth] += 1
2163 addsize(size, snapsizedepth[depth])
2163 addsize(size, snapsizedepth[depth])
2164 else:
2164 else:
2165 addsize(size, deltasize)
2165 addsize(size, deltasize)
2166 if delta == rev - 1:
2166 if delta == rev - 1:
2167 numprev += 1
2167 numprev += 1
2168 if delta == p1:
2168 if delta == p1:
2169 nump1prev += 1
2169 nump1prev += 1
2170 elif delta == p2:
2170 elif delta == p2:
2171 nump2prev += 1
2171 nump2prev += 1
2172 elif delta == p1:
2172 elif delta == p1:
2173 nump1 += 1
2173 nump1 += 1
2174 elif delta == p2:
2174 elif delta == p2:
2175 nump2 += 1
2175 nump2 += 1
2176 elif delta != nullrev:
2176 elif delta != nullrev:
2177 numother += 1
2177 numother += 1
2178
2178
2179 # Obtain data on the raw chunks in the revlog.
2179 # Obtain data on the raw chunks in the revlog.
2180 if util.safehasattr(r, '_getsegmentforrevs'):
2180 if util.safehasattr(r, '_getsegmentforrevs'):
2181 segment = r._getsegmentforrevs(rev, rev)[1]
2181 segment = r._getsegmentforrevs(rev, rev)[1]
2182 else:
2182 else:
2183 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2183 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2184 if segment:
2184 if segment:
2185 chunktype = bytes(segment[0:1])
2185 chunktype = bytes(segment[0:1])
2186 else:
2186 else:
2187 chunktype = 'empty'
2187 chunktype = 'empty'
2188
2188
2189 if chunktype not in chunktypecounts:
2189 if chunktype not in chunktypecounts:
2190 chunktypecounts[chunktype] = 0
2190 chunktypecounts[chunktype] = 0
2191 chunktypesizes[chunktype] = 0
2191 chunktypesizes[chunktype] = 0
2192
2192
2193 chunktypecounts[chunktype] += 1
2193 chunktypecounts[chunktype] += 1
2194 chunktypesizes[chunktype] += size
2194 chunktypesizes[chunktype] += size
2195
2195
2196 # Adjust size min value for empty cases
2196 # Adjust size min value for empty cases
2197 for size in (datasize, fullsize, semisize, deltasize):
2197 for size in (datasize, fullsize, semisize, deltasize):
2198 if size[0] is None:
2198 if size[0] is None:
2199 size[0] = 0
2199 size[0] = 0
2200
2200
2201 numdeltas = numrevs - numfull - numempty - numsemi
2201 numdeltas = numrevs - numfull - numempty - numsemi
2202 numoprev = numprev - nump1prev - nump2prev
2202 numoprev = numprev - nump1prev - nump2prev
2203 totalrawsize = datasize[2]
2203 totalrawsize = datasize[2]
2204 datasize[2] /= numrevs
2204 datasize[2] /= numrevs
2205 fulltotal = fullsize[2]
2205 fulltotal = fullsize[2]
2206 fullsize[2] /= numfull
2206 fullsize[2] /= numfull
2207 semitotal = semisize[2]
2207 semitotal = semisize[2]
2208 snaptotal = {}
2208 snaptotal = {}
2209 if numsemi > 0:
2209 if numsemi > 0:
2210 semisize[2] /= numsemi
2210 semisize[2] /= numsemi
2211 for depth in snapsizedepth:
2211 for depth in snapsizedepth:
2212 snaptotal[depth] = snapsizedepth[depth][2]
2212 snaptotal[depth] = snapsizedepth[depth][2]
2213 snapsizedepth[depth][2] /= numsnapdepth[depth]
2213 snapsizedepth[depth][2] /= numsnapdepth[depth]
2214
2214
2215 deltatotal = deltasize[2]
2215 deltatotal = deltasize[2]
2216 if numdeltas > 0:
2216 if numdeltas > 0:
2217 deltasize[2] /= numdeltas
2217 deltasize[2] /= numdeltas
2218 totalsize = fulltotal + semitotal + deltatotal
2218 totalsize = fulltotal + semitotal + deltatotal
2219 avgchainlen = sum(chainlengths) / numrevs
2219 avgchainlen = sum(chainlengths) / numrevs
2220 maxchainlen = max(chainlengths)
2220 maxchainlen = max(chainlengths)
2221 maxchainspan = max(chainspans)
2221 maxchainspan = max(chainspans)
2222 compratio = 1
2222 compratio = 1
2223 if totalsize:
2223 if totalsize:
2224 compratio = totalrawsize / totalsize
2224 compratio = totalrawsize / totalsize
2225
2225
2226 basedfmtstr = '%%%dd\n'
2226 basedfmtstr = '%%%dd\n'
2227 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2227 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2228
2228
2229 def dfmtstr(max):
2229 def dfmtstr(max):
2230 return basedfmtstr % len(str(max))
2230 return basedfmtstr % len(str(max))
2231 def pcfmtstr(max, padding=0):
2231 def pcfmtstr(max, padding=0):
2232 return basepcfmtstr % (len(str(max)), ' ' * padding)
2232 return basepcfmtstr % (len(str(max)), ' ' * padding)
2233
2233
2234 def pcfmt(value, total):
2234 def pcfmt(value, total):
2235 if total:
2235 if total:
2236 return (value, 100 * float(value) / total)
2236 return (value, 100 * float(value) / total)
2237 else:
2237 else:
2238 return value, 100.0
2238 return value, 100.0
2239
2239
2240 ui.write(('format : %d\n') % format)
2240 ui.write(('format : %d\n') % format)
2241 ui.write(('flags : %s\n') % ', '.join(flags))
2241 ui.write(('flags : %s\n') % ', '.join(flags))
2242
2242
2243 ui.write('\n')
2243 ui.write('\n')
2244 fmt = pcfmtstr(totalsize)
2244 fmt = pcfmtstr(totalsize)
2245 fmt2 = dfmtstr(totalsize)
2245 fmt2 = dfmtstr(totalsize)
2246 ui.write(('revisions : ') + fmt2 % numrevs)
2246 ui.write(('revisions : ') + fmt2 % numrevs)
2247 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2247 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2248 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2248 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2249 ui.write(('revisions : ') + fmt2 % numrevs)
2249 ui.write(('revisions : ') + fmt2 % numrevs)
2250 ui.write((' empty : ') + fmt % pcfmt(numempty, numrevs))
2250 ui.write((' empty : ') + fmt % pcfmt(numempty, numrevs))
2251 ui.write((' text : ')
2251 ui.write((' text : ')
2252 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta))
2252 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta))
2253 ui.write((' delta : ')
2253 ui.write((' delta : ')
2254 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta))
2254 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta))
2255 ui.write((' snapshot : ') + fmt % pcfmt(numfull + numsemi, numrevs))
2255 ui.write((' snapshot : ') + fmt % pcfmt(numfull + numsemi, numrevs))
2256 for depth in sorted(numsnapdepth):
2256 for depth in sorted(numsnapdepth):
2257 ui.write((' lvl-%-3d : ' % depth)
2257 ui.write((' lvl-%-3d : ' % depth)
2258 + fmt % pcfmt(numsnapdepth[depth], numrevs))
2258 + fmt % pcfmt(numsnapdepth[depth], numrevs))
2259 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2259 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2260 ui.write(('revision size : ') + fmt2 % totalsize)
2260 ui.write(('revision size : ') + fmt2 % totalsize)
2261 ui.write((' snapshot : ')
2261 ui.write((' snapshot : ')
2262 + fmt % pcfmt(fulltotal + semitotal, totalsize))
2262 + fmt % pcfmt(fulltotal + semitotal, totalsize))
2263 for depth in sorted(numsnapdepth):
2263 for depth in sorted(numsnapdepth):
2264 ui.write((' lvl-%-3d : ' % depth)
2264 ui.write((' lvl-%-3d : ' % depth)
2265 + fmt % pcfmt(snaptotal[depth], totalsize))
2265 + fmt % pcfmt(snaptotal[depth], totalsize))
2266 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2266 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2267
2267
2268 def fmtchunktype(chunktype):
2268 def fmtchunktype(chunktype):
2269 if chunktype == 'empty':
2269 if chunktype == 'empty':
2270 return ' %s : ' % chunktype
2270 return ' %s : ' % chunktype
2271 elif chunktype in pycompat.bytestr(string.ascii_letters):
2271 elif chunktype in pycompat.bytestr(string.ascii_letters):
2272 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2272 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2273 else:
2273 else:
2274 return ' 0x%s : ' % hex(chunktype)
2274 return ' 0x%s : ' % hex(chunktype)
2275
2275
2276 ui.write('\n')
2276 ui.write('\n')
2277 ui.write(('chunks : ') + fmt2 % numrevs)
2277 ui.write(('chunks : ') + fmt2 % numrevs)
2278 for chunktype in sorted(chunktypecounts):
2278 for chunktype in sorted(chunktypecounts):
2279 ui.write(fmtchunktype(chunktype))
2279 ui.write(fmtchunktype(chunktype))
2280 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2280 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2281 ui.write(('chunks size : ') + fmt2 % totalsize)
2281 ui.write(('chunks size : ') + fmt2 % totalsize)
2282 for chunktype in sorted(chunktypecounts):
2282 for chunktype in sorted(chunktypecounts):
2283 ui.write(fmtchunktype(chunktype))
2283 ui.write(fmtchunktype(chunktype))
2284 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2284 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2285
2285
2286 ui.write('\n')
2286 ui.write('\n')
2287 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2287 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2288 ui.write(('avg chain length : ') + fmt % avgchainlen)
2288 ui.write(('avg chain length : ') + fmt % avgchainlen)
2289 ui.write(('max chain length : ') + fmt % maxchainlen)
2289 ui.write(('max chain length : ') + fmt % maxchainlen)
2290 ui.write(('max chain reach : ') + fmt % maxchainspan)
2290 ui.write(('max chain reach : ') + fmt % maxchainspan)
2291 ui.write(('compression ratio : ') + fmt % compratio)
2291 ui.write(('compression ratio : ') + fmt % compratio)
2292
2292
2293 if format > 0:
2293 if format > 0:
2294 ui.write('\n')
2294 ui.write('\n')
2295 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2295 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2296 % tuple(datasize))
2296 % tuple(datasize))
2297 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2297 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2298 % tuple(fullsize))
2298 % tuple(fullsize))
2299 ui.write(('inter-snapshot size (min/max/avg) : %d / %d / %d\n')
2299 ui.write(('inter-snapshot size (min/max/avg) : %d / %d / %d\n')
2300 % tuple(semisize))
2300 % tuple(semisize))
2301 for depth in sorted(snapsizedepth):
2301 for depth in sorted(snapsizedepth):
2302 if depth == 0:
2302 if depth == 0:
2303 continue
2303 continue
2304 ui.write((' level-%-3d (min/max/avg) : %d / %d / %d\n')
2304 ui.write((' level-%-3d (min/max/avg) : %d / %d / %d\n')
2305 % ((depth,) + tuple(snapsizedepth[depth])))
2305 % ((depth,) + tuple(snapsizedepth[depth])))
2306 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2306 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2307 % tuple(deltasize))
2307 % tuple(deltasize))
2308
2308
2309 if numdeltas > 0:
2309 if numdeltas > 0:
2310 ui.write('\n')
2310 ui.write('\n')
2311 fmt = pcfmtstr(numdeltas)
2311 fmt = pcfmtstr(numdeltas)
2312 fmt2 = pcfmtstr(numdeltas, 4)
2312 fmt2 = pcfmtstr(numdeltas, 4)
2313 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2313 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2314 if numprev > 0:
2314 if numprev > 0:
2315 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2315 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2316 numprev))
2316 numprev))
2317 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2317 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2318 numprev))
2318 numprev))
2319 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2319 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2320 numprev))
2320 numprev))
2321 if gdelta:
2321 if gdelta:
2322 ui.write(('deltas against p1 : ')
2322 ui.write(('deltas against p1 : ')
2323 + fmt % pcfmt(nump1, numdeltas))
2323 + fmt % pcfmt(nump1, numdeltas))
2324 ui.write(('deltas against p2 : ')
2324 ui.write(('deltas against p2 : ')
2325 + fmt % pcfmt(nump2, numdeltas))
2325 + fmt % pcfmt(nump2, numdeltas))
2326 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2326 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2327 numdeltas))
2327 numdeltas))
2328
2328
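# A hedged usage sketch for debugrevlog; the file name is a placeholder
# and the flags come from cmdutil.debugrevlogopts:
#
#   $ hg debugrevlog -c               # statistics for the changelog
#   $ hg debugrevlog -m               # statistics for the manifest
#   $ hg debugrevlog path/to/file     # statistics for a file's revlog
#   $ hg debugrevlog --dump -c        # raw per-revision table using the
#                                     # header printed near the top above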
2329 @command('debugrevlogindex', cmdutil.debugrevlogopts +
2329 @command('debugrevlogindex', cmdutil.debugrevlogopts +
2330 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
2330 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
2331 _('[-f FORMAT] -c|-m|FILE'),
2331 _('[-f FORMAT] -c|-m|FILE'),
2332 optionalrepo=True)
2332 optionalrepo=True)
2333 def debugrevlogindex(ui, repo, file_=None, **opts):
2333 def debugrevlogindex(ui, repo, file_=None, **opts):
2334 """dump the contents of a revlog index"""
2334 """dump the contents of a revlog index"""
2335 opts = pycompat.byteskwargs(opts)
2335 opts = pycompat.byteskwargs(opts)
2336 r = cmdutil.openrevlog(repo, 'debugrevlogindex', file_, opts)
2336 r = cmdutil.openrevlog(repo, 'debugrevlogindex', file_, opts)
2337 format = opts.get('format', 0)
2337 format = opts.get('format', 0)
2338 if format not in (0, 1):
2338 if format not in (0, 1):
2339 raise error.Abort(_("unknown format %d") % format)
2339 raise error.Abort(_("unknown format %d") % format)
2340
2340
2341 if ui.debugflag:
2341 if ui.debugflag:
2342 shortfn = hex
2342 shortfn = hex
2343 else:
2343 else:
2344 shortfn = short
2344 shortfn = short
2345
2345
2346 # There might not be anything in r, so have a sane default
2346 # There might not be anything in r, so have a sane default
2347 idlen = 12
2347 idlen = 12
2348 for i in r:
2348 for i in r:
2349 idlen = len(shortfn(r.node(i)))
2349 idlen = len(shortfn(r.node(i)))
2350 break
2350 break
2351
2351
2352 if format == 0:
2352 if format == 0:
2353 if ui.verbose:
2353 if ui.verbose:
2354 ui.write((" rev offset length linkrev"
2354 ui.write((" rev offset length linkrev"
2355 " %s %s p2\n") % ("nodeid".ljust(idlen),
2355 " %s %s p2\n") % ("nodeid".ljust(idlen),
2356 "p1".ljust(idlen)))
2356 "p1".ljust(idlen)))
2357 else:
2357 else:
2358 ui.write((" rev linkrev %s %s p2\n") % (
2358 ui.write((" rev linkrev %s %s p2\n") % (
2359 "nodeid".ljust(idlen), "p1".ljust(idlen)))
2359 "nodeid".ljust(idlen), "p1".ljust(idlen)))
2360 elif format == 1:
2360 elif format == 1:
2361 if ui.verbose:
2361 if ui.verbose:
2362 ui.write((" rev flag offset length size link p1"
2362 ui.write((" rev flag offset length size link p1"
2363 " p2 %s\n") % "nodeid".rjust(idlen))
2363 " p2 %s\n") % "nodeid".rjust(idlen))
2364 else:
2364 else:
2365 ui.write((" rev flag size link p1 p2 %s\n") %
2365 ui.write((" rev flag size link p1 p2 %s\n") %
2366 "nodeid".rjust(idlen))
2366 "nodeid".rjust(idlen))
2367
2367
2368 for i in r:
2368 for i in r:
2369 node = r.node(i)
2369 node = r.node(i)
2370 if format == 0:
2370 if format == 0:
2371 try:
2371 try:
2372 pp = r.parents(node)
2372 pp = r.parents(node)
2373 except Exception:
2373 except Exception:
2374 pp = [nullid, nullid]
2374 pp = [nullid, nullid]
2375 if ui.verbose:
2375 if ui.verbose:
2376 ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
2376 ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
2377 i, r.start(i), r.length(i), r.linkrev(i),
2377 i, r.start(i), r.length(i), r.linkrev(i),
2378 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
2378 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
2379 else:
2379 else:
2380 ui.write("% 6d % 7d %s %s %s\n" % (
2380 ui.write("% 6d % 7d %s %s %s\n" % (
2381 i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
2381 i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
2382 shortfn(pp[1])))
2382 shortfn(pp[1])))
2383 elif format == 1:
2383 elif format == 1:
2384 pr = r.parentrevs(i)
2384 pr = r.parentrevs(i)
2385 if ui.verbose:
2385 if ui.verbose:
2386 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
2386 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
2387 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
2387 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
2388 r.linkrev(i), pr[0], pr[1], shortfn(node)))
2388 r.linkrev(i), pr[0], pr[1], shortfn(node)))
2389 else:
2389 else:
2390 ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
2390 ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
2391 i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
2391 i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
2392 shortfn(node)))
2392 shortfn(node)))
2393
2393
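# A hedged usage sketch for debugrevlogindex; flags are as declared above:
#
#   $ hg debugrevlogindex -c          # dump the changelog index (format 0)
#   $ hg debugrevlogindex -f 1 -m     # manifest index using format 1
#
# Verbose mode (-v) adds the offset and length columns shown in the
# header-writing code above.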
2394 @command('debugrevspec',
2394 @command('debugrevspec',
2395 [('', 'optimize', None,
2395 [('', 'optimize', None,
2396 _('print parsed tree after optimizing (DEPRECATED)')),
2396 _('print parsed tree after optimizing (DEPRECATED)')),
2397 ('', 'show-revs', True, _('print list of result revisions (default)')),
2397 ('', 'show-revs', True, _('print list of result revisions (default)')),
2398 ('s', 'show-set', None, _('print internal representation of result set')),
2398 ('s', 'show-set', None, _('print internal representation of result set')),
2399 ('p', 'show-stage', [],
2399 ('p', 'show-stage', [],
2400 _('print parsed tree at the given stage'), _('NAME')),
2400 _('print parsed tree at the given stage'), _('NAME')),
2401 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2401 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2402 ('', 'verify-optimized', False, _('verify optimized result')),
2402 ('', 'verify-optimized', False, _('verify optimized result')),
2403 ],
2403 ],
2404 ('REVSPEC'))
2404 ('REVSPEC'))
2405 def debugrevspec(ui, repo, expr, **opts):
2405 def debugrevspec(ui, repo, expr, **opts):
2406 """parse and apply a revision specification
2406 """parse and apply a revision specification
2407
2407
2408     Use the -p/--show-stage option to print the parsed tree at the given stages.
2408     Use the -p/--show-stage option to print the parsed tree at the given stages.
2409     Use -p all to print the tree at every stage.
2409     Use -p all to print the tree at every stage.
2410
2410
2411     Use the --no-show-revs option with -s or -p to print only the set
2411     Use the --no-show-revs option with -s or -p to print only the set
2412 representation or the parsed tree respectively.
2412 representation or the parsed tree respectively.
2413
2413
2414 Use --verify-optimized to compare the optimized result with the unoptimized
2414 Use --verify-optimized to compare the optimized result with the unoptimized
2415 one. Returns 1 if the optimized result differs.
2415 one. Returns 1 if the optimized result differs.
2416 """
2416 """
2417 opts = pycompat.byteskwargs(opts)
2417 opts = pycompat.byteskwargs(opts)
2418 aliases = ui.configitems('revsetalias')
2418 aliases = ui.configitems('revsetalias')
2419 stages = [
2419 stages = [
2420 ('parsed', lambda tree: tree),
2420 ('parsed', lambda tree: tree),
2421 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2421 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2422 ui.warn)),
2422 ui.warn)),
2423 ('concatenated', revsetlang.foldconcat),
2423 ('concatenated', revsetlang.foldconcat),
2424 ('analyzed', revsetlang.analyze),
2424 ('analyzed', revsetlang.analyze),
2425 ('optimized', revsetlang.optimize),
2425 ('optimized', revsetlang.optimize),
2426 ]
2426 ]
2427 if opts['no_optimized']:
2427 if opts['no_optimized']:
2428 stages = stages[:-1]
2428 stages = stages[:-1]
2429 if opts['verify_optimized'] and opts['no_optimized']:
2429 if opts['verify_optimized'] and opts['no_optimized']:
2430 raise error.Abort(_('cannot use --verify-optimized with '
2430 raise error.Abort(_('cannot use --verify-optimized with '
2431 '--no-optimized'))
2431 '--no-optimized'))
2432 stagenames = set(n for n, f in stages)
2432 stagenames = set(n for n, f in stages)
2433
2433
2434 showalways = set()
2434 showalways = set()
2435 showchanged = set()
2435 showchanged = set()
2436 if ui.verbose and not opts['show_stage']:
2436 if ui.verbose and not opts['show_stage']:
2437 # show parsed tree by --verbose (deprecated)
2437 # show parsed tree by --verbose (deprecated)
2438 showalways.add('parsed')
2438 showalways.add('parsed')
2439 showchanged.update(['expanded', 'concatenated'])
2439 showchanged.update(['expanded', 'concatenated'])
2440 if opts['optimize']:
2440 if opts['optimize']:
2441 showalways.add('optimized')
2441 showalways.add('optimized')
2442 if opts['show_stage'] and opts['optimize']:
2442 if opts['show_stage'] and opts['optimize']:
2443 raise error.Abort(_('cannot use --optimize with --show-stage'))
2443 raise error.Abort(_('cannot use --optimize with --show-stage'))
2444 if opts['show_stage'] == ['all']:
2444 if opts['show_stage'] == ['all']:
2445 showalways.update(stagenames)
2445 showalways.update(stagenames)
2446 else:
2446 else:
2447 for n in opts['show_stage']:
2447 for n in opts['show_stage']:
2448 if n not in stagenames:
2448 if n not in stagenames:
2449 raise error.Abort(_('invalid stage name: %s') % n)
2449 raise error.Abort(_('invalid stage name: %s') % n)
2450 showalways.update(opts['show_stage'])
2450 showalways.update(opts['show_stage'])
2451
2451
2452 treebystage = {}
2452 treebystage = {}
2453 printedtree = None
2453 printedtree = None
2454 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
2454 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
2455 for n, f in stages:
2455 for n, f in stages:
2456 treebystage[n] = tree = f(tree)
2456 treebystage[n] = tree = f(tree)
2457 if n in showalways or (n in showchanged and tree != printedtree):
2457 if n in showalways or (n in showchanged and tree != printedtree):
2458 if opts['show_stage'] or n != 'parsed':
2458 if opts['show_stage'] or n != 'parsed':
2459 ui.write(("* %s:\n") % n)
2459 ui.write(("* %s:\n") % n)
2460 ui.write(revsetlang.prettyformat(tree), "\n")
2460 ui.write(revsetlang.prettyformat(tree), "\n")
2461 printedtree = tree
2461 printedtree = tree
2462
2462
2463 if opts['verify_optimized']:
2463 if opts['verify_optimized']:
2464 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2464 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2465 brevs = revset.makematcher(treebystage['optimized'])(repo)
2465 brevs = revset.makematcher(treebystage['optimized'])(repo)
2466 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2466 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2467 ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
2467 ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
2468 ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
2468 ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
2469 arevs = list(arevs)
2469 arevs = list(arevs)
2470 brevs = list(brevs)
2470 brevs = list(brevs)
2471 if arevs == brevs:
2471 if arevs == brevs:
2472 return 0
2472 return 0
2473 ui.write(('--- analyzed\n'), label='diff.file_a')
2473 ui.write(('--- analyzed\n'), label='diff.file_a')
2474 ui.write(('+++ optimized\n'), label='diff.file_b')
2474 ui.write(('+++ optimized\n'), label='diff.file_b')
2475 sm = difflib.SequenceMatcher(None, arevs, brevs)
2475 sm = difflib.SequenceMatcher(None, arevs, brevs)
2476 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2476 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2477 if tag in ('delete', 'replace'):
2477 if tag in (r'delete', r'replace'):
2478 for c in arevs[alo:ahi]:
2478 for c in arevs[alo:ahi]:
2479 ui.write('-%s\n' % c, label='diff.deleted')
2479 ui.write('-%d\n' % c, label='diff.deleted')
2480 if tag in ('insert', 'replace'):
2480 if tag in (r'insert', r'replace'):
2481 for c in brevs[blo:bhi]:
2481 for c in brevs[blo:bhi]:
2482 ui.write('+%s\n' % c, label='diff.inserted')
2482 ui.write('+%d\n' % c, label='diff.inserted')
2483 if tag == 'equal':
2483 if tag == r'equal':
2484 for c in arevs[alo:ahi]:
2484 for c in arevs[alo:ahi]:
2485 ui.write(' %s\n' % c)
2485 ui.write(' %d\n' % c)
2486 return 1
2486 return 1
2487
2487
2488 func = revset.makematcher(tree)
2488 func = revset.makematcher(tree)
2489 revs = func(repo)
2489 revs = func(repo)
2490 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2490 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2491 ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
2491 ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
2492 if not opts['show_revs']:
2492 if not opts['show_revs']:
2493 return
2493 return
2494 for c in revs:
2494 for c in revs:
2495 ui.write("%d\n" % c)
2495 ui.write("%d\n" % c)
2496
2496
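# A hedged usage sketch for debugrevspec; the revset expression is
# illustrative:
#
#   $ hg debugrevspec -p all '::tip'            # parsed tree at every
#                                               # stage, then the revisions
#   $ hg debugrevspec --verify-optimized '::tip'
#                                               # exits 1 if the optimized
#                                               # evaluation differs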
2497 @command('debugserve', [
2497 @command('debugserve', [
2498 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2498 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2499 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2499 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2500 ('', 'logiofile', '', _('file to log server I/O to')),
2500 ('', 'logiofile', '', _('file to log server I/O to')),
2501 ], '')
2501 ], '')
2502 def debugserve(ui, repo, **opts):
2502 def debugserve(ui, repo, **opts):
2503 """run a server with advanced settings
2503 """run a server with advanced settings
2504
2504
2505 This command is similar to :hg:`serve`. It exists partially as a
2505 This command is similar to :hg:`serve`. It exists partially as a
2506     workaround for the fact that ``hg serve --stdio`` must have specific
2506     workaround for the fact that ``hg serve --stdio`` must have specific
2507 arguments for security reasons.
2507 arguments for security reasons.
2508 """
2508 """
2509 opts = pycompat.byteskwargs(opts)
2509 opts = pycompat.byteskwargs(opts)
2510
2510
2511 if not opts['sshstdio']:
2511 if not opts['sshstdio']:
2512 raise error.Abort(_('only --sshstdio is currently supported'))
2512 raise error.Abort(_('only --sshstdio is currently supported'))
2513
2513
2514 logfh = None
2514 logfh = None
2515
2515
2516 if opts['logiofd'] and opts['logiofile']:
2516 if opts['logiofd'] and opts['logiofile']:
2517 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2517 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2518
2518
2519 if opts['logiofd']:
2519 if opts['logiofd']:
2520 # Line buffered because output is line based.
2520 # Line buffered because output is line based.
2521 try:
2521 try:
2522 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2522 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2523 except OSError as e:
2523 except OSError as e:
2524 if e.errno != errno.ESPIPE:
2524 if e.errno != errno.ESPIPE:
2525 raise
2525 raise
2526 # can't seek a pipe, so `ab` mode fails on py3
2526 # can't seek a pipe, so `ab` mode fails on py3
2527 logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
2527 logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
2528 elif opts['logiofile']:
2528 elif opts['logiofile']:
2529 logfh = open(opts['logiofile'], 'ab', 1)
2529 logfh = open(opts['logiofile'], 'ab', 1)
2530
2530
2531 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2531 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2532 s.serve_forever()
2532 s.serve_forever()
2533
2533
2534 @command('debugsetparents', [], _('REV1 [REV2]'))
2534 @command('debugsetparents', [], _('REV1 [REV2]'))
2535 def debugsetparents(ui, repo, rev1, rev2=None):
2535 def debugsetparents(ui, repo, rev1, rev2=None):
2536 """manually set the parents of the current working directory
2536 """manually set the parents of the current working directory
2537
2537
2538 This is useful for writing repository conversion tools, but should
2538 This is useful for writing repository conversion tools, but should
2539 be used with care. For example, neither the working directory nor the
2539 be used with care. For example, neither the working directory nor the
2540 dirstate is updated, so file status may be incorrect after running this
2540 dirstate is updated, so file status may be incorrect after running this
2541 command.
2541 command.
2542
2542
2543 Returns 0 on success.
2543 Returns 0 on success.
2544 """
2544 """
2545
2545
2546 node1 = scmutil.revsingle(repo, rev1).node()
2546 node1 = scmutil.revsingle(repo, rev1).node()
2547 node2 = scmutil.revsingle(repo, rev2, 'null').node()
2547 node2 = scmutil.revsingle(repo, rev2, 'null').node()
2548
2548
2549 with repo.wlock():
2549 with repo.wlock():
2550 repo.setparents(node1, node2)
2550 repo.setparents(node1, node2)
2551
2551
2552 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2552 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2553 def debugssl(ui, repo, source=None, **opts):
2553 def debugssl(ui, repo, source=None, **opts):
2554 '''test a secure connection to a server
2554 '''test a secure connection to a server
2555
2555
2556 This builds the certificate chain for the server on Windows, installing the
2556 This builds the certificate chain for the server on Windows, installing the
2557 missing intermediates and trusted root via Windows Update if necessary. It
2557 missing intermediates and trusted root via Windows Update if necessary. It
2558 does nothing on other platforms.
2558 does nothing on other platforms.
2559
2559
2560 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2560 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2561 that server is used. See :hg:`help urls` for more information.
2561 that server is used. See :hg:`help urls` for more information.
2562
2562
2563 If the update succeeds, retry the original operation. Otherwise, the cause
2563 If the update succeeds, retry the original operation. Otherwise, the cause
2564 of the SSL error is likely another issue.
2564 of the SSL error is likely another issue.
2565 '''
2565 '''
2566 if not pycompat.iswindows:
2566 if not pycompat.iswindows:
2567 raise error.Abort(_('certificate chain building is only possible on '
2567 raise error.Abort(_('certificate chain building is only possible on '
2568 'Windows'))
2568 'Windows'))
2569
2569
2570 if not source:
2570 if not source:
2571 if not repo:
2571 if not repo:
2572 raise error.Abort(_("there is no Mercurial repository here, and no "
2572 raise error.Abort(_("there is no Mercurial repository here, and no "
2573 "server specified"))
2573 "server specified"))
2574 source = "default"
2574 source = "default"
2575
2575
2576 source, branches = hg.parseurl(ui.expandpath(source))
2576 source, branches = hg.parseurl(ui.expandpath(source))
2577 url = util.url(source)
2577 url = util.url(source)
2578
2578
2579 defaultport = {'https': 443, 'ssh': 22}
2579 defaultport = {'https': 443, 'ssh': 22}
2580 if url.scheme in defaultport:
2580 if url.scheme in defaultport:
2581 try:
2581 try:
2582 addr = (url.host, int(url.port or defaultport[url.scheme]))
2582 addr = (url.host, int(url.port or defaultport[url.scheme]))
2583 except ValueError:
2583 except ValueError:
2584 raise error.Abort(_("malformed port number in URL"))
2584 raise error.Abort(_("malformed port number in URL"))
2585 else:
2585 else:
2586 raise error.Abort(_("only https and ssh connections are supported"))
2586 raise error.Abort(_("only https and ssh connections are supported"))
2587
2587
2588 from . import win32
2588 from . import win32
2589
2589
2590 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2590 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2591 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2591 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2592
2592
2593 try:
2593 try:
2594 s.connect(addr)
2594 s.connect(addr)
2595 cert = s.getpeercert(True)
2595 cert = s.getpeercert(True)
2596
2596
2597 ui.status(_('checking the certificate chain for %s\n') % url.host)
2597 ui.status(_('checking the certificate chain for %s\n') % url.host)
2598
2598
2599 complete = win32.checkcertificatechain(cert, build=False)
2599 complete = win32.checkcertificatechain(cert, build=False)
2600
2600
2601 if not complete:
2601 if not complete:
2602 ui.status(_('certificate chain is incomplete, updating... '))
2602 ui.status(_('certificate chain is incomplete, updating... '))
2603
2603
2604 if not win32.checkcertificatechain(cert):
2604 if not win32.checkcertificatechain(cert):
2605 ui.status(_('failed.\n'))
2605 ui.status(_('failed.\n'))
2606 else:
2606 else:
2607 ui.status(_('done.\n'))
2607 ui.status(_('done.\n'))
2608 else:
2608 else:
2609 ui.status(_('full certificate chain is available\n'))
2609 ui.status(_('full certificate chain is available\n'))
2610 finally:
2610 finally:
2611 s.close()
2611 s.close()
2612
2612
2613 @command('debugsub',
2613 @command('debugsub',
2614 [('r', 'rev', '',
2614 [('r', 'rev', '',
2615 _('revision to check'), _('REV'))],
2615 _('revision to check'), _('REV'))],
2616 _('[-r REV] [REV]'))
2616 _('[-r REV] [REV]'))
2617 def debugsub(ui, repo, rev=None):
2617 def debugsub(ui, repo, rev=None):
2618 ctx = scmutil.revsingle(repo, rev, None)
2618 ctx = scmutil.revsingle(repo, rev, None)
2619 for k, v in sorted(ctx.substate.items()):
2619 for k, v in sorted(ctx.substate.items()):
2620 ui.write(('path %s\n') % k)
2620 ui.write(('path %s\n') % k)
2621 ui.write((' source %s\n') % v[0])
2621 ui.write((' source %s\n') % v[0])
2622 ui.write((' revision %s\n') % v[1])
2622 ui.write((' revision %s\n') % v[1])
2623
2623
2624 @command('debugsuccessorssets',
2624 @command('debugsuccessorssets',
2625 [('', 'closest', False, _('return closest successors sets only'))],
2625 [('', 'closest', False, _('return closest successors sets only'))],
2626 _('[REV]'))
2626 _('[REV]'))
2627 def debugsuccessorssets(ui, repo, *revs, **opts):
2627 def debugsuccessorssets(ui, repo, *revs, **opts):
2628 """show set of successors for revision
2628 """show set of successors for revision
2629
2629
2630 A successors set of changeset A is a consistent group of revisions that
2630 A successors set of changeset A is a consistent group of revisions that
2631 succeed A. It contains only non-obsolete changesets, unless the
2631 succeed A. It contains only non-obsolete changesets, unless the
2632 --closest option is given.
2632 --closest option is given.
2633
2633
2634 In most cases a changeset A has a single successors set containing a single
2634 In most cases a changeset A has a single successors set containing a single
2635 successor (changeset A replaced by A').
2635 successor (changeset A replaced by A').
2636
2636
2637 A changeset that is made obsolete with no successors is called "pruned".
2637 A changeset that is made obsolete with no successors is called "pruned".
2638 Such changesets have no successors sets at all.
2638 Such changesets have no successors sets at all.
2639
2639
2640 A changeset that has been "split" will have a successors set containing
2640 A changeset that has been "split" will have a successors set containing
2641 more than one successor.
2641 more than one successor.
2642
2642
2643 A changeset that has been rewritten in multiple different ways is called
2643 A changeset that has been rewritten in multiple different ways is called
2644 "divergent". Such changesets have multiple successor sets (each of which
2644 "divergent". Such changesets have multiple successor sets (each of which
2645 may also be split, i.e. have multiple successors).
2645 may also be split, i.e. have multiple successors).
2646
2646
2647 Results are displayed as follows::
2647 Results are displayed as follows::
2648
2648
2649 <rev1>
2649 <rev1>
2650 <successors-1A>
2650 <successors-1A>
2651 <rev2>
2651 <rev2>
2652 <successors-2A>
2652 <successors-2A>
2653 <successors-2B1> <successors-2B2> <successors-2B3>
2653 <successors-2B1> <successors-2B2> <successors-2B3>
2654
2654
2655 Here rev2 has two possible (i.e. divergent) successors sets. The first
2655 Here rev2 has two possible (i.e. divergent) successors sets. The first
2656 holds one element, whereas the second holds three (i.e. the changeset has
2656 holds one element, whereas the second holds three (i.e. the changeset has
2657 been split).
2657 been split).
2658 """
2658 """
2659 # passed to successorssets caching computation from one call to another
2659 # passed to successorssets caching computation from one call to another
2660 cache = {}
2660 cache = {}
2661 ctx2str = bytes
2661 ctx2str = bytes
2662 node2str = short
2662 node2str = short
2663 for rev in scmutil.revrange(repo, revs):
2663 for rev in scmutil.revrange(repo, revs):
2664 ctx = repo[rev]
2664 ctx = repo[rev]
2665 ui.write('%s\n' % ctx2str(ctx))
2665 ui.write('%s\n' % ctx2str(ctx))
2666 for succsset in obsutil.successorssets(repo, ctx.node(),
2666 for succsset in obsutil.successorssets(repo, ctx.node(),
2667 closest=opts[r'closest'],
2667 closest=opts[r'closest'],
2668 cache=cache):
2668 cache=cache):
2669 if succsset:
2669 if succsset:
2670 ui.write(' ')
2670 ui.write(' ')
2671 ui.write(node2str(succsset[0]))
2671 ui.write(node2str(succsset[0]))
2672 for node in succsset[1:]:
2672 for node in succsset[1:]:
2673 ui.write(' ')
2673 ui.write(' ')
2674 ui.write(node2str(node))
2674 ui.write(node2str(node))
2675 ui.write('\n')
2675 ui.write('\n')
2676
2676
2677 @command('debugtemplate',
2677 @command('debugtemplate',
2678 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2678 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2679 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2679 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2680 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2680 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2681 optionalrepo=True)
2681 optionalrepo=True)
2682 def debugtemplate(ui, repo, tmpl, **opts):
2682 def debugtemplate(ui, repo, tmpl, **opts):
2683 """parse and apply a template
2683 """parse and apply a template
2684
2684
2685 If -r/--rev is given, the template is processed as a log template and
2685 If -r/--rev is given, the template is processed as a log template and
2686 applied to the given changesets. Otherwise, it is processed as a generic
2686 applied to the given changesets. Otherwise, it is processed as a generic
2687 template.
2687 template.
2688
2688
2689 Use --verbose to print the parsed tree.
2689 Use --verbose to print the parsed tree.
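
A couple of illustrative invocations (the -r form assumes an existing
repository; the template strings are placeholders)::

  hg debugtemplate -D word=hello '{word}\n'
  hg debugtemplate -r . '{node|short} {desc|firstline}\n'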
2690 """
2690 """
2691 revs = None
2691 revs = None
2692 if opts[r'rev']:
2692 if opts[r'rev']:
2693 if repo is None:
2693 if repo is None:
2694 raise error.RepoError(_('there is no Mercurial repository here '
2694 raise error.RepoError(_('there is no Mercurial repository here '
2695 '(.hg not found)'))
2695 '(.hg not found)'))
2696 revs = scmutil.revrange(repo, opts[r'rev'])
2696 revs = scmutil.revrange(repo, opts[r'rev'])
2697
2697
2698 props = {}
2698 props = {}
2699 for d in opts[r'define']:
2699 for d in opts[r'define']:
2700 try:
2700 try:
2701 k, v = (e.strip() for e in d.split('=', 1))
2701 k, v = (e.strip() for e in d.split('=', 1))
2702 if not k or k == 'ui':
2702 if not k or k == 'ui':
2703 raise ValueError
2703 raise ValueError
2704 props[k] = v
2704 props[k] = v
2705 except ValueError:
2705 except ValueError:
2706 raise error.Abort(_('malformed keyword definition: %s') % d)
2706 raise error.Abort(_('malformed keyword definition: %s') % d)
2707
2707
2708 if ui.verbose:
2708 if ui.verbose:
2709 aliases = ui.configitems('templatealias')
2709 aliases = ui.configitems('templatealias')
2710 tree = templater.parse(tmpl)
2710 tree = templater.parse(tmpl)
2711 ui.note(templater.prettyformat(tree), '\n')
2711 ui.note(templater.prettyformat(tree), '\n')
2712 newtree = templater.expandaliases(tree, aliases)
2712 newtree = templater.expandaliases(tree, aliases)
2713 if newtree != tree:
2713 if newtree != tree:
2714 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2714 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2715
2715
2716 if revs is None:
2716 if revs is None:
2717 tres = formatter.templateresources(ui, repo)
2717 tres = formatter.templateresources(ui, repo)
2718 t = formatter.maketemplater(ui, tmpl, resources=tres)
2718 t = formatter.maketemplater(ui, tmpl, resources=tres)
2719 if ui.verbose:
2719 if ui.verbose:
2720 kwds, funcs = t.symbolsuseddefault()
2720 kwds, funcs = t.symbolsuseddefault()
2721 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2721 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2722 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2722 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2723 ui.write(t.renderdefault(props))
2723 ui.write(t.renderdefault(props))
2724 else:
2724 else:
2725 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2725 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2726 if ui.verbose:
2726 if ui.verbose:
2727 kwds, funcs = displayer.t.symbolsuseddefault()
2727 kwds, funcs = displayer.t.symbolsuseddefault()
2728 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2728 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2729 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2729 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2730 for r in revs:
2730 for r in revs:
2731 displayer.show(repo[r], **pycompat.strkwargs(props))
2731 displayer.show(repo[r], **pycompat.strkwargs(props))
2732 displayer.close()
2732 displayer.close()
2733
2733
2734 @command('debuguigetpass', [
2734 @command('debuguigetpass', [
2735 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2735 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2736 ], _('[-p TEXT]'), norepo=True)
2736 ], _('[-p TEXT]'), norepo=True)
2737 def debuguigetpass(ui, prompt=''):
2737 def debuguigetpass(ui, prompt=''):
2738 """show prompt to type password"""
2738 """show prompt to type password"""
2739 r = ui.getpass(prompt)
2739 r = ui.getpass(prompt)
2740 ui.write(('response: %s\n') % r)
2740 ui.write(('response: %s\n') % r)
2741
2741
2742 @command('debuguiprompt', [
2742 @command('debuguiprompt', [
2743 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2743 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2744 ], _('[-p TEXT]'), norepo=True)
2744 ], _('[-p TEXT]'), norepo=True)
2745 def debuguiprompt(ui, prompt=''):
2745 def debuguiprompt(ui, prompt=''):
2746 """show plain prompt"""
2746 """show plain prompt"""
2747 r = ui.prompt(prompt)
2747 r = ui.prompt(prompt)
2748 ui.write(('response: %s\n') % r)
2748 ui.write(('response: %s\n') % r)
2749
2749
2750 @command('debugupdatecaches', [])
2750 @command('debugupdatecaches', [])
2751 def debugupdatecaches(ui, repo, *pats, **opts):
2751 def debugupdatecaches(ui, repo, *pats, **opts):
2752 """warm all known caches in the repository"""
2752 """warm all known caches in the repository"""
2753 with repo.wlock(), repo.lock():
2753 with repo.wlock(), repo.lock():
2754 repo.updatecaches(full=True)
2754 repo.updatecaches(full=True)
2755
2755
2756 @command('debugupgraderepo', [
2756 @command('debugupgraderepo', [
2757 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2757 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2758 ('', 'run', False, _('performs an upgrade')),
2758 ('', 'run', False, _('performs an upgrade')),
2759 ('', 'backup', True, _('keep the old repository content around')),
2759 ('', 'backup', True, _('keep the old repository content around')),
2760 ])
2760 ])
2761 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True):
2761 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True):
2762 """upgrade a repository to use different features
2762 """upgrade a repository to use different features
2763
2763
2764 If no arguments are specified, the repository is evaluated for upgrade
2764 If no arguments are specified, the repository is evaluated for upgrade
2765 and a list of problems and potential optimizations is printed.
2765 and a list of problems and potential optimizations is printed.
2766
2766
2767 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2767 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2768 can be influenced via additional arguments. More details will be provided
2768 can be influenced via additional arguments. More details will be provided
2769 by the command output when run without ``--run``.
2769 by the command output when run without ``--run``.
2770
2770
2771 During the upgrade, the repository will be locked and no writes will be
2771 During the upgrade, the repository will be locked and no writes will be
2772 allowed.
2772 allowed.
2773
2773
2774 At the end of the upgrade, the repository may not be readable while new
2774 At the end of the upgrade, the repository may not be readable while new
2775 repository data is swapped in. This window will be as long as it takes to
2775 repository data is swapped in. This window will be as long as it takes to
2776 rename some directories inside the ``.hg`` directory. On most machines, this
2776 rename some directories inside the ``.hg`` directory. On most machines, this
2777 should complete almost instantaneously and the chances of a consumer being
2777 should complete almost instantaneously and the chances of a consumer being
2778 unable to access the repository should be low.
2778 unable to access the repository should be low.
2779 """
2779 """
2780 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize,
2780 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize,
2781 backup=backup)
2781 backup=backup)
2782
2782
2783 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2783 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2784 inferrepo=True)
2784 inferrepo=True)
2785 def debugwalk(ui, repo, *pats, **opts):
2785 def debugwalk(ui, repo, *pats, **opts):
2786 """show how files match on given patterns"""
2786 """show how files match on given patterns"""
2787 opts = pycompat.byteskwargs(opts)
2787 opts = pycompat.byteskwargs(opts)
2788 m = scmutil.match(repo[None], pats, opts)
2788 m = scmutil.match(repo[None], pats, opts)
2789 if ui.verbose:
2789 if ui.verbose:
2790 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
2790 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
2791 items = list(repo[None].walk(m))
2791 items = list(repo[None].walk(m))
2792 if not items:
2792 if not items:
2793 return
2793 return
2794 f = lambda fn: fn
2794 f = lambda fn: fn
2795 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2795 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2796 f = lambda fn: util.normpath(fn)
2796 f = lambda fn: util.normpath(fn)
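# Column widths are derived from the longest repository-relative path and
# the longest cwd-relative path; the doubled %% keeps the width specifiers
# literal until the per-file formatting below.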
2797 fmt = 'f %%-%ds %%-%ds %%s' % (
2797 fmt = 'f %%-%ds %%-%ds %%s' % (
2798 max([len(abs) for abs in items]),
2798 max([len(abs) for abs in items]),
2799 max([len(repo.pathto(abs)) for abs in items]))
2799 max([len(repo.pathto(abs)) for abs in items]))
2800 for abs in items:
2800 for abs in items:
2801 line = fmt % (abs, f(repo.pathto(abs)), m.exact(abs) and 'exact' or '')
2801 line = fmt % (abs, f(repo.pathto(abs)), m.exact(abs) and 'exact' or '')
2802 ui.write("%s\n" % line.rstrip())
2802 ui.write("%s\n" % line.rstrip())
2803
2803
2804 @command('debugwhyunstable', [], _('REV'))
2804 @command('debugwhyunstable', [], _('REV'))
2805 def debugwhyunstable(ui, repo, rev):
2805 def debugwhyunstable(ui, repo, rev):
2806 """explain instabilities of a changeset"""
2806 """explain instabilities of a changeset"""
2807 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
2807 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
2808 dnodes = ''
2808 dnodes = ''
2809 if entry.get('divergentnodes'):
2809 if entry.get('divergentnodes'):
2810 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2810 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2811 for ctx in entry['divergentnodes']) + ' '
2811 for ctx in entry['divergentnodes']) + ' '
2812 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2812 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2813 entry['reason'], entry['node']))
2813 entry['reason'], entry['node']))
2814
2814
2815 @command('debugwireargs',
2815 @command('debugwireargs',
2816 [('', 'three', '', 'three'),
2816 [('', 'three', '', 'three'),
2817 ('', 'four', '', 'four'),
2817 ('', 'four', '', 'four'),
2818 ('', 'five', '', 'five'),
2818 ('', 'five', '', 'five'),
2819 ] + cmdutil.remoteopts,
2819 ] + cmdutil.remoteopts,
2820 _('REPO [OPTIONS]... [ONE [TWO]]'),
2820 _('REPO [OPTIONS]... [ONE [TWO]]'),
2821 norepo=True)
2821 norepo=True)
2822 def debugwireargs(ui, repopath, *vals, **opts):
2822 def debugwireargs(ui, repopath, *vals, **opts):
2823 opts = pycompat.byteskwargs(opts)
2823 opts = pycompat.byteskwargs(opts)
2824 repo = hg.peer(ui, opts, repopath)
2824 repo = hg.peer(ui, opts, repopath)
2825 for opt in cmdutil.remoteopts:
2825 for opt in cmdutil.remoteopts:
2826 del opts[opt[1]]
2826 del opts[opt[1]]
2827 args = {}
2827 args = {}
2828 for k, v in opts.iteritems():
2828 for k, v in opts.iteritems():
2829 if v:
2829 if v:
2830 args[k] = v
2830 args[k] = v
2831 args = pycompat.strkwargs(args)
2831 args = pycompat.strkwargs(args)
2832 # run twice to check that we don't mess up the stream for the next command
2832 # run twice to check that we don't mess up the stream for the next command
2833 res1 = repo.debugwireargs(*vals, **args)
2833 res1 = repo.debugwireargs(*vals, **args)
2834 res2 = repo.debugwireargs(*vals, **args)
2834 res2 = repo.debugwireargs(*vals, **args)
2835 ui.write("%s\n" % res1)
2835 ui.write("%s\n" % res1)
2836 if res1 != res2:
2836 if res1 != res2:
2837 ui.warn("%s\n" % res2)
2837 ui.warn("%s\n" % res2)
2838
2838
2839 def _parsewirelangblocks(fh):
2839 def _parsewirelangblocks(fh):
2840 activeaction = None
2840 activeaction = None
2841 blocklines = []
2841 blocklines = []
2842 lastindent = 0
2842 lastindent = 0
2843
2843
2844 for line in fh:
2844 for line in fh:
2845 line = line.rstrip()
2845 line = line.rstrip()
2846 if not line:
2846 if not line:
2847 continue
2847 continue
2848
2848
2849 if line.startswith(b'#'):
2849 if line.startswith(b'#'):
2850 continue
2850 continue
2851
2851
2852 if not line.startswith(b' '):
2852 if not line.startswith(b' '):
2853 # New block. Flush previous one.
2853 # New block. Flush previous one.
2854 if activeaction:
2854 if activeaction:
2855 yield activeaction, blocklines
2855 yield activeaction, blocklines
2856
2856
2857 activeaction = line
2857 activeaction = line
2858 blocklines = []
2858 blocklines = []
2859 lastindent = 0
2859 lastindent = 0
2860 continue
2860 continue
2861
2861
2862 # Else we start with an indent.
2862 # Else we start with an indent.
2863
2863
2864 if not activeaction:
2864 if not activeaction:
2865 raise error.Abort(_('indented line outside of block'))
2865 raise error.Abort(_('indented line outside of block'))
2866
2866
2867 indent = len(line) - len(line.lstrip())
2867 indent = len(line) - len(line.lstrip())
2868
2868
2869 # If this line is indented more than the last line, concatenate it.
2869 # If this line is indented more than the last line, concatenate it.
2870 if indent > lastindent and blocklines:
2870 if indent > lastindent and blocklines:
2871 blocklines[-1] += line.lstrip()
2871 blocklines[-1] += line.lstrip()
2872 else:
2872 else:
2873 blocklines.append(line)
2873 blocklines.append(line)
2874 lastindent = indent
2874 lastindent = indent
2875
2875
2876 # Flush last block.
2876 # Flush last block.
2877 if activeaction:
2877 if activeaction:
2878 yield activeaction, blocklines
2878 yield activeaction, blocklines
2879
2879
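# A minimal sketch of how the parser above groups wire-language input into
# (action, lines) blocks; the input is hypothetical and shown purely for
# illustration:
#
#   >>> import io
#   >>> data = b'command listkeys\n    namespace bookmarks\n'
#   >>> list(_parsewirelangblocks(io.BytesIO(data)))
#   [(b'command listkeys', [b'    namespace bookmarks'])]
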
2880 @command('debugwireproto',
2880 @command('debugwireproto',
2881 [
2881 [
2882 ('', 'localssh', False, _('start an SSH server for this repo')),
2882 ('', 'localssh', False, _('start an SSH server for this repo')),
2883 ('', 'peer', '', _('construct a specific version of the peer')),
2883 ('', 'peer', '', _('construct a specific version of the peer')),
2884 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2884 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2885 ('', 'nologhandshake', False,
2885 ('', 'nologhandshake', False,
2886 _('do not log I/O related to the peer handshake')),
2886 _('do not log I/O related to the peer handshake')),
2887 ] + cmdutil.remoteopts,
2887 ] + cmdutil.remoteopts,
2888 _('[PATH]'),
2888 _('[PATH]'),
2889 optionalrepo=True)
2889 optionalrepo=True)
2890 def debugwireproto(ui, repo, path=None, **opts):
2890 def debugwireproto(ui, repo, path=None, **opts):
2891 """send wire protocol commands to a server
2891 """send wire protocol commands to a server
2892
2892
2893 This command can be used to issue wire protocol commands to remote
2893 This command can be used to issue wire protocol commands to remote
2894 peers and to debug the raw data being exchanged.
2894 peers and to debug the raw data being exchanged.
2895
2895
2896 ``--localssh`` will start an SSH server against the current repository
2896 ``--localssh`` will start an SSH server against the current repository
2897 and connect to that. By default, the connection will perform a handshake
2897 and connect to that. By default, the connection will perform a handshake
2898 and establish an appropriate peer instance.
2898 and establish an appropriate peer instance.
2899
2899
2900 ``--peer`` can be used to bypass the handshake protocol and construct a
2900 ``--peer`` can be used to bypass the handshake protocol and construct a
2901 peer instance using the specified class type. Valid values are ``raw``,
2901 peer instance using the specified class type. Valid values are ``raw``,
2902 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2902 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2903 raw data payloads and don't support higher-level command actions.
2903 raw data payloads and don't support higher-level command actions.
2904
2904
2905 ``--noreadstderr`` can be used to disable automatic reading from stderr
2905 ``--noreadstderr`` can be used to disable automatic reading from stderr
2906 of the peer (for SSH connections only). Disabling automatic reading of
2906 of the peer (for SSH connections only). Disabling automatic reading of
2907 stderr is useful for making output more deterministic.
2907 stderr is useful for making output more deterministic.
2908
2908
2909 Commands are issued via a mini language which is specified via stdin.
2909 Commands are issued via a mini language which is specified via stdin.
2910 The language consists of individual actions to perform. An action is
2910 The language consists of individual actions to perform. An action is
2911 defined by a block. A block is defined as a line with no leading
2911 defined by a block. A block is defined as a line with no leading
2912 space followed by 0 or more lines with leading space. Blocks are
2912 space followed by 0 or more lines with leading space. Blocks are
2913 effectively a high-level command with additional metadata.
2913 effectively a high-level command with additional metadata.
2914
2914
2915 Lines beginning with ``#`` are ignored.
2915 Lines beginning with ``#`` are ignored.
2916
2916
2917 The following sections denote available actions.
2917 The following sections denote available actions.
2918
2918
2919 raw
2919 raw
2920 ---
2920 ---
2921
2921
2922 Send raw data to the server.
2922 Send raw data to the server.
2923
2923
2924 The block payload contains the raw data to send as one atomic send
2924 The block payload contains the raw data to send as one atomic send
2925 operation. The data may not actually be delivered in a single system
2925 operation. The data may not actually be delivered in a single system
2926 call: it depends on the abilities of the transport being used.
2926 call: it depends on the abilities of the transport being used.
2927
2927
2928 Each line in the block is de-indented and concatenated. Then, that
2928 Each line in the block is de-indented and concatenated. Then, that
2929 value is evaluated as a Python b'' literal. This allows the use of
2929 value is evaluated as a Python b'' literal. This allows the use of
2930 backslash escaping, etc.
2930 backslash escaping, etc.
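
For example, a handshake probe could be sent as a single payload
(illustrative only)::

  raw
      hello\n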
2931
2931
2932 raw+
2932 raw+
2933 ----
2933 ----
2934
2934
2935 Behaves like ``raw`` except flushes output afterwards.
2935 Behaves like ``raw`` except flushes output afterwards.
2936
2936
2937 command <X>
2937 command <X>
2938 -----------
2938 -----------
2939
2939
2940 Send a request to run a named command, whose name follows the ``command``
2940 Send a request to run a named command, whose name follows the ``command``
2941 string.
2941 string.
2942
2942
2943 Arguments to the command are defined as lines in this block. The format of
2943 Arguments to the command are defined as lines in this block. The format of
2944 each line is ``<key> <value>``. e.g.::
2944 each line is ``<key> <value>``. e.g.::
2945
2945
2946 command listkeys
2946 command listkeys
2947 namespace bookmarks
2947 namespace bookmarks
2948
2948
2949 If the value begins with ``eval:``, it will be interpreted as a Python
2949 If the value begins with ``eval:``, it will be interpreted as a Python
2950 literal expression. Otherwise values are interpreted as Python b'' literals.
2950 literal expression. Otherwise values are interpreted as Python b'' literals.
2951 This allows sending complex types and encoding special byte sequences via
2951 This allows sending complex types and encoding special byte sequences via
2952 backslash escaping.
2952 backslash escaping.
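
For example, a hypothetical invocation passing an evaluated Python list::

  command known
      nodes eval:[]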
2953
2953
2954 The following arguments have special meaning:
2954 The following arguments have special meaning:
2955
2955
2956 ``PUSHFILE``
2956 ``PUSHFILE``
2957 When defined, the *push* mechanism of the peer will be used instead
2957 When defined, the *push* mechanism of the peer will be used instead
2958 of the static request-response mechanism and the content of the
2958 of the static request-response mechanism and the content of the
2959 file specified in the value of this argument will be sent as the
2959 file specified in the value of this argument will be sent as the
2960 command payload.
2960 command payload.
2961
2961
2962 This can be used to submit a local bundle file to the remote.
2962 This can be used to submit a local bundle file to the remote.
2963
2963
2964 batchbegin
2964 batchbegin
2965 ----------
2965 ----------
2966
2966
2967 Instruct the peer to begin a batched send.
2967 Instruct the peer to begin a batched send.
2968
2968
2969 All ``command`` blocks are queued for execution until the next
2969 All ``command`` blocks are queued for execution until the next
2970 ``batchsubmit`` block.
2970 ``batchsubmit`` block.
2971
2971
2972 batchsubmit
2972 batchsubmit
2973 -----------
2973 -----------
2974
2974
2975 Submit previously queued ``command`` blocks as a batch request.
2975 Submit previously queued ``command`` blocks as a batch request.
2976
2976
2977 This action MUST be paired with a ``batchbegin`` action.
2977 This action MUST be paired with a ``batchbegin`` action.
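
For example, a hypothetical batch issuing two commands::

  batchbegin
  command heads
  command listkeys
      namespace bookmarks
  batchsubmit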
2978
2978
2979 httprequest <method> <path>
2979 httprequest <method> <path>
2980 ---------------------------
2980 ---------------------------
2981
2981
2982 (HTTP peer only)
2982 (HTTP peer only)
2983
2983
2984 Send an HTTP request to the peer.
2984 Send an HTTP request to the peer.
2985
2985
2986 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
2986 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
2987
2987
2988 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
2988 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
2989 headers to add to the request. e.g. ``Accept: foo``.
2989 headers to add to the request. e.g. ``Accept: foo``.
2990
2990
2991 The following arguments are special:
2991 The following arguments are special:
2992
2992
2993 ``BODYFILE``
2993 ``BODYFILE``
2994 The content of the file defined as the value to this argument will be
2994 The content of the file defined as the value to this argument will be
2995 transferred verbatim as the HTTP request body.
2995 transferred verbatim as the HTTP request body.
2996
2996
2997 ``frame <type> <flags> <payload>``
2997 ``frame <type> <flags> <payload>``
2998 Send a unified protocol frame as part of the request body.
2998 Send a unified protocol frame as part of the request body.
2999
2999
3000 All frames will be collected and sent as the body to the HTTP
3000 All frames will be collected and sent as the body to the HTTP
3001 request.
3001 request.
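
For example, a hypothetical request whose path and header values are
placeholders::

  httprequest GET api/
      user-agent: test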
3002
3002
3003 close
3003 close
3004 -----
3004 -----
3005
3005
3006 Close the connection to the server.
3006 Close the connection to the server.
3007
3007
3008 flush
3008 flush
3009 -----
3009 -----
3010
3010
3011 Flush data written to the server.
3011 Flush data written to the server.
3012
3012
3013 readavailable
3013 readavailable
3014 -------------
3014 -------------
3015
3015
3016 Close the write end of the connection and read all available data from
3016 Close the write end of the connection and read all available data from
3017 the server.
3017 the server.
3018
3018
3019 If the connection to the server encompasses multiple pipes, we poll both
3019 If the connection to the server encompasses multiple pipes, we poll both
3020 pipes and read available data.
3020 pipes and read available data.
3021
3021
3022 readline
3022 readline
3023 --------
3023 --------
3024
3024
3025 Read a line of output from the server. If there are multiple output
3025 Read a line of output from the server. If there are multiple output
3026 pipes, reads only the main pipe.
3026 pipes, reads only the main pipe.
3027
3027
3028 ereadline
3028 ereadline
3029 ---------
3029 ---------
3030
3030
3031 Like ``readline``, but read from the stderr pipe, if available.
3031 Like ``readline``, but read from the stderr pipe, if available.
3032
3032
3033 read <X>
3033 read <X>
3034 --------
3034 --------
3035
3035
3036 ``read()`` X bytes from the server's main output pipe.
3036 ``read()`` X bytes from the server's main output pipe.
3037
3037
3038 eread <X>
3038 eread <X>
3039 ---------
3039 ---------
3040
3040
3041 ``read()`` X bytes from the server's stderr pipe, if available.
3041 ``read()`` X bytes from the server's stderr pipe, if available.
3042
3042
3043 Specifying Unified Frame-Based Protocol Frames
3043 Specifying Unified Frame-Based Protocol Frames
3044 ----------------------------------------------
3044 ----------------------------------------------
3045
3045
3046 It is possible to emit a *Unified Frame-Based Protocol* by using special
3046 It is possible to emit a *Unified Frame-Based Protocol* by using special
3047 syntax.
3047 syntax.
3048
3048
3049 A frame is composed of a type, flags, and payload. These can be parsed
3049 A frame is composed of a type, flags, and payload. These can be parsed
3050 from a string of the form:
3050 from a string of the form:
3051
3051
3052 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
3052 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
3053
3053
3054 ``request-id`` and ``stream-id`` are integers defining the request and
3054 ``request-id`` and ``stream-id`` are integers defining the request and
3055 stream identifiers.
3055 stream identifiers.
3056
3056
3057 ``type`` can be an integer value for the frame type or the string name
3057 ``type`` can be an integer value for the frame type or the string name
3058 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
3058 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
3059 ``command-name``.
3059 ``command-name``.
3060
3060
3061 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
3061 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
3062 components. Each component (and there can be just one) can be an integer
3062 components. Each component (and there can be just one) can be an integer
3063 or a flag name for stream flags or frame flags, respectively. Values are
3063 or a flag name for stream flags or frame flags, respectively. Values are
3064 resolved to integers and then bitwise OR'd together.
3064 resolved to integers and then bitwise OR'd together.
3065
3065
3066 ``payload`` represents the raw frame payload. If it begins with
3066 ``payload`` represents the raw frame payload. If it begins with
3067 ``cbor:``, the following string is evaluated as Python code and the
3067 ``cbor:``, the following string is evaluated as Python code and the
3068 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3068 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3069 as a Python byte string literal.
3069 as a Python byte string literal.
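
For example, a hypothetical frame issuing the ``heads`` command (type and
flag names as defined in ``wireprotoframing.py``)::

  frame 1 1 stream-begin command-request new cbor:{b'name': b'heads'}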
3070 """
3070 """
3071 opts = pycompat.byteskwargs(opts)
3071 opts = pycompat.byteskwargs(opts)
3072
3072
3073 if opts['localssh'] and not repo:
3073 if opts['localssh'] and not repo:
3074 raise error.Abort(_('--localssh requires a repository'))
3074 raise error.Abort(_('--localssh requires a repository'))
3075
3075
3076 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
3076 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
3077 raise error.Abort(_('invalid value for --peer'),
3077 raise error.Abort(_('invalid value for --peer'),
3078 hint=_('valid values are "raw", "http2", "ssh1", and "ssh2"'))
3078 hint=_('valid values are "raw", "http2", "ssh1", and "ssh2"'))
3079
3079
3080 if path and opts['localssh']:
3080 if path and opts['localssh']:
3081 raise error.Abort(_('cannot specify --localssh with an explicit '
3081 raise error.Abort(_('cannot specify --localssh with an explicit '
3082 'path'))
3082 'path'))
3083
3083
3084 if ui.interactive():
3084 if ui.interactive():
3085 ui.write(_('(waiting for commands on stdin)\n'))
3085 ui.write(_('(waiting for commands on stdin)\n'))
3086
3086
3087 blocks = list(_parsewirelangblocks(ui.fin))
3087 blocks = list(_parsewirelangblocks(ui.fin))
3088
3088
3089 proc = None
3089 proc = None
3090 stdin = None
3090 stdin = None
3091 stdout = None
3091 stdout = None
3092 stderr = None
3092 stderr = None
3093 opener = None
3093 opener = None
3094
3094
3095 if opts['localssh']:
3095 if opts['localssh']:
3096 # We start the SSH server in its own process so there is process
3096 # We start the SSH server in its own process so there is process
3097 # separation. This prevents a whole class of potential bugs around
3097 # separation. This prevents a whole class of potential bugs around
3098 # shared state from interfering with server operation.
3098 # shared state from interfering with server operation.
3099 args = procutil.hgcmd() + [
3099 args = procutil.hgcmd() + [
3100 '-R', repo.root,
3100 '-R', repo.root,
3101 'debugserve', '--sshstdio',
3101 'debugserve', '--sshstdio',
3102 ]
3102 ]
3103 proc = subprocess.Popen(pycompat.rapply(procutil.tonativestr, args),
3103 proc = subprocess.Popen(pycompat.rapply(procutil.tonativestr, args),
3104 stdin=subprocess.PIPE,
3104 stdin=subprocess.PIPE,
3105 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
3105 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
3106 bufsize=0)
3106 bufsize=0)
3107
3107
3108 stdin = proc.stdin
3108 stdin = proc.stdin
3109 stdout = proc.stdout
3109 stdout = proc.stdout
3110 stderr = proc.stderr
3110 stderr = proc.stderr
3111
3111
3112 # We turn the pipes into observers so we can log I/O.
3112 # We turn the pipes into observers so we can log I/O.
3113 if ui.verbose or opts['peer'] == 'raw':
3113 if ui.verbose or opts['peer'] == 'raw':
3114 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
3114 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
3115 logdata=True)
3115 logdata=True)
3116 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
3116 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
3117 logdata=True)
3117 logdata=True)
3118 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
3118 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
3119 logdata=True)
3119 logdata=True)
3120
3120
3121 # --localssh also implies the peer connection settings.
3121 # --localssh also implies the peer connection settings.
3122
3122
3123 url = 'ssh://localserver'
3123 url = 'ssh://localserver'
3124 autoreadstderr = not opts['noreadstderr']
3124 autoreadstderr = not opts['noreadstderr']
3125
3125
3126 if opts['peer'] == 'ssh1':
3126 if opts['peer'] == 'ssh1':
3127 ui.write(_('creating ssh peer for wire protocol version 1\n'))
3127 ui.write(_('creating ssh peer for wire protocol version 1\n'))
3128 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
3128 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
3129 None, autoreadstderr=autoreadstderr)
3129 None, autoreadstderr=autoreadstderr)
3130 elif opts['peer'] == 'ssh2':
3130 elif opts['peer'] == 'ssh2':
3131 ui.write(_('creating ssh peer for wire protocol version 2\n'))
3131 ui.write(_('creating ssh peer for wire protocol version 2\n'))
3132 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
3132 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
3133 None, autoreadstderr=autoreadstderr)
3133 None, autoreadstderr=autoreadstderr)
3134 elif opts['peer'] == 'raw':
3134 elif opts['peer'] == 'raw':
3135 ui.write(_('using raw connection to peer\n'))
3135 ui.write(_('using raw connection to peer\n'))
3136 peer = None
3136 peer = None
3137 else:
3137 else:
3138 ui.write(_('creating ssh peer from handshake results\n'))
3138 ui.write(_('creating ssh peer from handshake results\n'))
3139 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
3139 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
3140 autoreadstderr=autoreadstderr)
3140 autoreadstderr=autoreadstderr)
3141
3141
3142 elif path:
3142 elif path:
3143 # We bypass hg.peer() so we can proxy the sockets.
3143 # We bypass hg.peer() so we can proxy the sockets.
3144 # TODO consider not doing this because we skip
3144 # TODO consider not doing this because we skip
3145 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
3145 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
3146 u = util.url(path)
3146 u = util.url(path)
3147 if u.scheme != 'http':
3147 if u.scheme != 'http':
3148 raise error.Abort(_('only http:// paths are currently supported'))
3148 raise error.Abort(_('only http:// paths are currently supported'))
3149
3149
3150 url, authinfo = u.authinfo()
3150 url, authinfo = u.authinfo()
3151 openerargs = {
3151 openerargs = {
3152 r'useragent': b'Mercurial debugwireproto',
3152 r'useragent': b'Mercurial debugwireproto',
3153 }
3153 }
3154
3154
3155 # Turn pipes/sockets into observers so we can log I/O.
3155 # Turn pipes/sockets into observers so we can log I/O.
3156 if ui.verbose:
3156 if ui.verbose:
3157 openerargs.update({
3157 openerargs.update({
3158 r'loggingfh': ui,
3158 r'loggingfh': ui,
3159 r'loggingname': b's',
3159 r'loggingname': b's',
3160 r'loggingopts': {
3160 r'loggingopts': {
3161 r'logdata': True,
3161 r'logdata': True,
3162 r'logdataapis': False,
3162 r'logdataapis': False,
3163 },
3163 },
3164 })
3164 })
3165
3165
3166 if ui.debugflag:
3166 if ui.debugflag:
3167 openerargs[r'loggingopts'][r'logdataapis'] = True
3167 openerargs[r'loggingopts'][r'logdataapis'] = True
3168
3168
3169 # Don't send default headers when in raw mode. This allows us to
3169 # Don't send default headers when in raw mode. This allows us to
3170 # bypass most of the behavior of our URL handling code so we can
3170 # bypass most of the behavior of our URL handling code so we can
3171 # have near complete control over what's sent on the wire.
3171 # have near complete control over what's sent on the wire.
3172 if opts['peer'] == 'raw':
3172 if opts['peer'] == 'raw':
3173 openerargs[r'sendaccept'] = False
3173 openerargs[r'sendaccept'] = False
3174
3174
3175 opener = urlmod.opener(ui, authinfo, **openerargs)
3175 opener = urlmod.opener(ui, authinfo, **openerargs)
3176
3176
3177 if opts['peer'] == 'http2':
3177 if opts['peer'] == 'http2':
3178 ui.write(_('creating http peer for wire protocol version 2\n'))
3178 ui.write(_('creating http peer for wire protocol version 2\n'))
3179 # We go through makepeer() because we need an API descriptor for
3179 # We go through makepeer() because we need an API descriptor for
3180 # the peer instance to be useful.
3180 # the peer instance to be useful.
3181 with ui.configoverride({
3181 with ui.configoverride({
3182 ('experimental', 'httppeer.advertise-v2'): True}):
3182 ('experimental', 'httppeer.advertise-v2'): True}):
3183 if opts['nologhandshake']:
3183 if opts['nologhandshake']:
3184 ui.pushbuffer()
3184 ui.pushbuffer()
3185
3185
3186 peer = httppeer.makepeer(ui, path, opener=opener)
3186 peer = httppeer.makepeer(ui, path, opener=opener)
3187
3187
3188 if opts['nologhandshake']:
3188 if opts['nologhandshake']:
3189 ui.popbuffer()
3189 ui.popbuffer()
3190
3190
3191 if not isinstance(peer, httppeer.httpv2peer):
3191 if not isinstance(peer, httppeer.httpv2peer):
3192 raise error.Abort(_('could not instantiate HTTP peer for '
3192 raise error.Abort(_('could not instantiate HTTP peer for '
3193 'wire protocol version 2'),
3193 'wire protocol version 2'),
3194 hint=_('the server may not have the feature '
3194 hint=_('the server may not have the feature '
3195 'enabled or is not allowing this '
3195 'enabled or is not allowing this '
3196 'client version'))
3196 'client version'))
3197
3197
3198 elif opts['peer'] == 'raw':
3198 elif opts['peer'] == 'raw':
3199 ui.write(_('using raw connection to peer\n'))
3199 ui.write(_('using raw connection to peer\n'))
3200 peer = None
3200 peer = None
3201 elif opts['peer']:
3201 elif opts['peer']:
3202 raise error.Abort(_('--peer %s not supported with HTTP peers') %
3202 raise error.Abort(_('--peer %s not supported with HTTP peers') %
3203 opts['peer'])
3203 opts['peer'])
3204 else:
3204 else:
3205 peer = httppeer.makepeer(ui, path, opener=opener)
3205 peer = httppeer.makepeer(ui, path, opener=opener)
3206
3206
3207 # We /could/ populate stdin/stdout with sock.makefile()...
3207 # We /could/ populate stdin/stdout with sock.makefile()...
3208 else:
3208 else:
3209 raise error.Abort(_('unsupported connection configuration'))
3209 raise error.Abort(_('unsupported connection configuration'))
3210
3210
3211 batchedcommands = None
3211 batchedcommands = None
3212
3212
3213 # Now perform actions based on the parsed wire language instructions.
3213 # Now perform actions based on the parsed wire language instructions.
3214 for action, lines in blocks:
3214 for action, lines in blocks:
3215 if action in ('raw', 'raw+'):
3215 if action in ('raw', 'raw+'):
3216 if not stdin:
3216 if not stdin:
3217 raise error.Abort(_('cannot call raw/raw+ on this peer'))
3217 raise error.Abort(_('cannot call raw/raw+ on this peer'))
3218
3218
3219 # Concatenate the data together.
3219 # Concatenate the data together.
3220 data = ''.join(l.lstrip() for l in lines)
3220 data = ''.join(l.lstrip() for l in lines)
3221 data = stringutil.unescapestr(data)
3221 data = stringutil.unescapestr(data)
3222 stdin.write(data)
3222 stdin.write(data)
3223
3223
3224 if action == 'raw+':
3224 if action == 'raw+':
3225 stdin.flush()
3225 stdin.flush()
3226 elif action == 'flush':
3226 elif action == 'flush':
3227 if not stdin:
3227 if not stdin:
3228 raise error.Abort(_('cannot call flush on this peer'))
3228 raise error.Abort(_('cannot call flush on this peer'))
3229 stdin.flush()
3229 stdin.flush()
3230 elif action.startswith('command'):
3230 elif action.startswith('command'):
3231 if not peer:
3231 if not peer:
3232 raise error.Abort(_('cannot send commands unless peer instance '
3232 raise error.Abort(_('cannot send commands unless peer instance '
3233 'is available'))
3233 'is available'))
3234
3234
3235 command = action.split(' ', 1)[1]
3235 command = action.split(' ', 1)[1]
3236
3236
3237 args = {}
3237 args = {}
3238 for line in lines:
3238 for line in lines:
3239 # We need to allow empty values.
3239 # We need to allow empty values.
3240 fields = line.lstrip().split(' ', 1)
3240 fields = line.lstrip().split(' ', 1)
3241 if len(fields) == 1:
3241 if len(fields) == 1:
3242 key = fields[0]
3242 key = fields[0]
3243 value = ''
3243 value = ''
3244 else:
3244 else:
3245 key, value = fields
3245 key, value = fields
3246
3246
3247 if value.startswith('eval:'):
3247 if value.startswith('eval:'):
3248 value = stringutil.evalpythonliteral(value[5:])
3248 value = stringutil.evalpythonliteral(value[5:])
3249 else:
3249 else:
3250 value = stringutil.unescapestr(value)
3250 value = stringutil.unescapestr(value)
3251
3251
3252 args[key] = value
3252 args[key] = value
3253
3253
3254 if batchedcommands is not None:
3254 if batchedcommands is not None:
3255 batchedcommands.append((command, args))
3255 batchedcommands.append((command, args))
3256 continue
3256 continue
3257
3257
3258 ui.status(_('sending %s command\n') % command)
3258 ui.status(_('sending %s command\n') % command)
3259
3259
3260 if 'PUSHFILE' in args:
3260 if 'PUSHFILE' in args:
3261 with open(args['PUSHFILE'], r'rb') as fh:
3261 with open(args['PUSHFILE'], r'rb') as fh:
3262 del args['PUSHFILE']
3262 del args['PUSHFILE']
3263 res, output = peer._callpush(command, fh,
3263 res, output = peer._callpush(command, fh,
3264 **pycompat.strkwargs(args))
3264 **pycompat.strkwargs(args))
3265 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3265 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3266 ui.status(_('remote output: %s\n') %
3266 ui.status(_('remote output: %s\n') %
3267 stringutil.escapestr(output))
3267 stringutil.escapestr(output))
3268 else:
3268 else:
3269 with peer.commandexecutor() as e:
3269 with peer.commandexecutor() as e:
3270 res = e.callcommand(command, args).result()
3270 res = e.callcommand(command, args).result()
3271
3271
3272 if isinstance(res, wireprotov2peer.commandresponse):
3272 if isinstance(res, wireprotov2peer.commandresponse):
3273 val = res.objects()
3273 val = res.objects()
3274 ui.status(_('response: %s\n') %
3274 ui.status(_('response: %s\n') %
3275 stringutil.pprint(val, bprefix=True, indent=2))
3275 stringutil.pprint(val, bprefix=True, indent=2))
3276 else:
3276 else:
3277 ui.status(_('response: %s\n') %
3277 ui.status(_('response: %s\n') %
3278 stringutil.pprint(res, bprefix=True, indent=2))
3278 stringutil.pprint(res, bprefix=True, indent=2))
3279
3279
3280 elif action == 'batchbegin':
3280 elif action == 'batchbegin':
3281 if batchedcommands is not None:
3281 if batchedcommands is not None:
3282 raise error.Abort(_('nested batchbegin not allowed'))
3282 raise error.Abort(_('nested batchbegin not allowed'))
3283
3283
3284 batchedcommands = []
3284 batchedcommands = []
3285 elif action == 'batchsubmit':
3285 elif action == 'batchsubmit':
3286 # There is a batching API we could go through. But it would be
3286 # There is a batching API we could go through. But it would be
3287 # difficult to normalize requests into function calls. It is easier
3287 # difficult to normalize requests into function calls. It is easier
3288 # to bypass this layer and normalize to commands + args.
3288 # to bypass this layer and normalize to commands + args.
3289 ui.status(_('sending batch with %d sub-commands\n') %
3289 ui.status(_('sending batch with %d sub-commands\n') %
3290 len(batchedcommands))
3290 len(batchedcommands))
3291 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3291 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3292 ui.status(_('response #%d: %s\n') %
3292 ui.status(_('response #%d: %s\n') %
3293 (i, stringutil.escapestr(chunk)))
3293 (i, stringutil.escapestr(chunk)))
3294
3294
3295 batchedcommands = None
3295 batchedcommands = None
3296
3296
3297 elif action.startswith('httprequest '):
3297 elif action.startswith('httprequest '):
3298 if not opener:
3298 if not opener:
3299 raise error.Abort(_('cannot use httprequest without an HTTP '
3299 raise error.Abort(_('cannot use httprequest without an HTTP '
3300 'peer'))
3300 'peer'))
3301
3301
3302 request = action.split(' ', 2)
3302 request = action.split(' ', 2)
3303 if len(request) != 3:
3303 if len(request) != 3:
3304 raise error.Abort(_('invalid httprequest: expected format is '
3304 raise error.Abort(_('invalid httprequest: expected format is '
3305 '"httprequest <method> <path>'))
3305 '"httprequest <method> <path>'))
3306
3306
3307 method, httppath = request[1:]
3307 method, httppath = request[1:]
3308 headers = {}
3308 headers = {}
3309 body = None
3309 body = None
3310 frames = []
3310 frames = []
3311 for line in lines:
3311 for line in lines:
3312 line = line.lstrip()
3312 line = line.lstrip()
3313 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3313 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3314 if m:
3314 if m:
3315 # Headers need to use native strings.
3315 # Headers need to use native strings.
3316 key = pycompat.strurl(m.group(1))
3316 key = pycompat.strurl(m.group(1))
3317 value = pycompat.strurl(m.group(2))
3317 value = pycompat.strurl(m.group(2))
3318 headers[key] = value
3318 headers[key] = value
3319 continue
3319 continue
3320
3320
3321 if line.startswith(b'BODYFILE '):
3321 if line.startswith(b'BODYFILE '):
3322 with open(line.split(b' ', 1)[1], 'rb') as fh:
3322 with open(line.split(b' ', 1)[1], 'rb') as fh:
3323 body = fh.read()
3323 body = fh.read()
3324 elif line.startswith(b'frame '):
3324 elif line.startswith(b'frame '):
3325 frame = wireprotoframing.makeframefromhumanstring(
3325 frame = wireprotoframing.makeframefromhumanstring(
3326 line[len(b'frame '):])
3326 line[len(b'frame '):])
3327
3327
3328 frames.append(frame)
3328 frames.append(frame)
3329 else:
3329 else:
3330 raise error.Abort(_('unknown argument to httprequest: %s') %
3330 raise error.Abort(_('unknown argument to httprequest: %s') %
3331 line)
3331 line)
3332
3332
3333 url = path + httppath
3333 url = path + httppath
3334
3334
3335 if frames:
3335 if frames:
3336 body = b''.join(bytes(f) for f in frames)
3336 body = b''.join(bytes(f) for f in frames)
3337
3337
3338 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3338 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3339
3339
3340 # urllib.Request insists on using has_data() as a proxy for
3340 # urllib.Request insists on using has_data() as a proxy for
3341 # determining the request method. Override that to use our
3341 # determining the request method. Override that to use our
3342 # explicitly requested method.
3342 # explicitly requested method.
3343 req.get_method = lambda: pycompat.sysstr(method)
3343 req.get_method = lambda: pycompat.sysstr(method)
3344
3344
3345 try:
3345 try:
3346 res = opener.open(req)
3346 res = opener.open(req)
3347 body = res.read()
3347 body = res.read()
3348 except util.urlerr.urlerror as e:
3348 except util.urlerr.urlerror as e:
3349 # read() method must be called, but only exists in Python 2
3349 # read() method must be called, but only exists in Python 2
3350 getattr(e, 'read', lambda: None)()
3350 getattr(e, 'read', lambda: None)()
3351 continue
3351 continue
3352
3352
3353 ct = res.headers.get(r'Content-Type')
3353 ct = res.headers.get(r'Content-Type')
3354 if ct == r'application/mercurial-cbor':
3354 if ct == r'application/mercurial-cbor':
3355 ui.write(_('cbor> %s\n') %
3355 ui.write(_('cbor> %s\n') %
3356 stringutil.pprint(cborutil.decodeall(body),
3356 stringutil.pprint(cborutil.decodeall(body),
3357 bprefix=True,
3357 bprefix=True,
3358 indent=2))
3358 indent=2))
3359
3359
3360 elif action == 'close':
3360 elif action == 'close':
3361 peer.close()
3361 peer.close()
3362 elif action == 'readavailable':
3362 elif action == 'readavailable':
3363 if not stdout or not stderr:
3363 if not stdout or not stderr:
3364 raise error.Abort(_('readavailable not available on this peer'))
3364 raise error.Abort(_('readavailable not available on this peer'))
3365
3365
3366 stdin.close()
3366 stdin.close()
3367 stdout.read()
3367 stdout.read()
3368 stderr.read()
3368 stderr.read()
3369
3369
3370 elif action == 'readline':
3370 elif action == 'readline':
3371 if not stdout:
3371 if not stdout:
3372 raise error.Abort(_('readline not available on this peer'))
3372 raise error.Abort(_('readline not available on this peer'))
3373 stdout.readline()
3373 stdout.readline()
3374 elif action == 'ereadline':
3374 elif action == 'ereadline':
3375 if not stderr:
3375 if not stderr:
3376 raise error.Abort(_('ereadline not available on this peer'))
3376 raise error.Abort(_('ereadline not available on this peer'))
3377 stderr.readline()
3377 stderr.readline()
3378 elif action.startswith('read '):
3378 elif action.startswith('read '):
3379 count = int(action.split(' ', 1)[1])
3379 count = int(action.split(' ', 1)[1])
3380 if not stdout:
3380 if not stdout:
3381 raise error.Abort(_('read not available on this peer'))
3381 raise error.Abort(_('read not available on this peer'))
3382 stdout.read(count)
3382 stdout.read(count)
3383 elif action.startswith('eread '):
3383 elif action.startswith('eread '):
3384 count = int(action.split(' ', 1)[1])
3384 count = int(action.split(' ', 1)[1])
3385 if not stderr:
3385 if not stderr:
3386 raise error.Abort(_('eread not available on this peer'))
3386 raise error.Abort(_('eread not available on this peer'))
3387 stderr.read(count)
3387 stderr.read(count)
3388 else:
3388 else:
3389 raise error.Abort(_('unknown action: %s') % action)
3389 raise error.Abort(_('unknown action: %s') % action)
3390
3390
3391 if batchedcommands is not None:
3391 if batchedcommands is not None:
3392 raise error.Abort(_('unclosed "batchbegin" request'))
3392 raise error.Abort(_('unclosed "batchbegin" request'))
3393
3393
3394 if peer:
3394 if peer:
3395 peer.close()
3395 peer.close()
3396
3396
3397 if proc:
3397 if proc:
3398 proc.kill()
3398 proc.kill()