debugcommands: use a revset instead of dagutil...
Gregory Szorc - r39199:26f3d075 default
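In short: this changeset drops the dagutil import from debugcommands.py and rewrites the --old discovery path of debugdiscovery so that the heads of the common set are computed with a revset instead of a dagutil.revlogdag. The revset heads(::x) selects the DAG heads of the ancestors of x, which is what the removed ancestorset()/headsetofconnecteds() pair computed. A condensed sketch of the new logic (a reading aid only; names and values as in the hunk further below):

    clnode = repo.changelog.node
    common = repo.revs('heads(::%ln)', common)  # revset: heads of the ancestors of the common nodes
    common = {clnode(r) for r in common}        # map revision numbers back to node ids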
@@ -1,3327 +1,3325
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import difflib
12 import difflib
13 import errno
13 import errno
14 import operator
14 import operator
15 import os
15 import os
16 import random
16 import random
17 import re
17 import re
18 import socket
18 import socket
19 import ssl
19 import ssl
20 import stat
20 import stat
21 import string
21 import string
22 import subprocess
22 import subprocess
23 import sys
23 import sys
24 import time
24 import time
25
25
26 from .i18n import _
26 from .i18n import _
27 from .node import (
27 from .node import (
28 bin,
28 bin,
29 hex,
29 hex,
30 nullhex,
30 nullhex,
31 nullid,
31 nullid,
32 nullrev,
32 nullrev,
33 short,
33 short,
34 )
34 )
35 from .thirdparty import (
35 from .thirdparty import (
36 cbor,
36 cbor,
37 )
37 )
38 from . import (
38 from . import (
39 bundle2,
39 bundle2,
40 changegroup,
40 changegroup,
41 cmdutil,
41 cmdutil,
42 color,
42 color,
43 context,
43 context,
44 dagparser,
44 dagparser,
- 45 dagutil,
46 encoding,
45 encoding,
47 error,
46 error,
48 exchange,
47 exchange,
49 extensions,
48 extensions,
50 filemerge,
49 filemerge,
51 filesetlang,
50 filesetlang,
52 formatter,
51 formatter,
53 hg,
52 hg,
54 httppeer,
53 httppeer,
55 localrepo,
54 localrepo,
56 lock as lockmod,
55 lock as lockmod,
57 logcmdutil,
56 logcmdutil,
58 merge as mergemod,
57 merge as mergemod,
59 obsolete,
58 obsolete,
60 obsutil,
59 obsutil,
61 phases,
60 phases,
62 policy,
61 policy,
63 pvec,
62 pvec,
64 pycompat,
63 pycompat,
65 registrar,
64 registrar,
66 repair,
65 repair,
67 revlog,
66 revlog,
68 revset,
67 revset,
69 revsetlang,
68 revsetlang,
70 scmutil,
69 scmutil,
71 setdiscovery,
70 setdiscovery,
72 simplemerge,
71 simplemerge,
73 sshpeer,
72 sshpeer,
74 sslutil,
73 sslutil,
75 streamclone,
74 streamclone,
76 templater,
75 templater,
77 treediscovery,
76 treediscovery,
78 upgrade,
77 upgrade,
79 url as urlmod,
78 url as urlmod,
80 util,
79 util,
81 vfs as vfsmod,
80 vfs as vfsmod,
82 wireprotoframing,
81 wireprotoframing,
83 wireprotoserver,
82 wireprotoserver,
84 wireprotov2peer,
83 wireprotov2peer,
85 )
84 )
86 from .utils import (
85 from .utils import (
87 dateutil,
86 dateutil,
88 procutil,
87 procutil,
89 stringutil,
88 stringutil,
90 )
89 )
91
90
92 release = lockmod.release
91 release = lockmod.release
93
92
94 command = registrar.command()
93 command = registrar.command()
95
94
96 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
95 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
97 def debugancestor(ui, repo, *args):
96 def debugancestor(ui, repo, *args):
98 """find the ancestor revision of two revisions in a given index"""
97 """find the ancestor revision of two revisions in a given index"""
99 if len(args) == 3:
98 if len(args) == 3:
100 index, rev1, rev2 = args
99 index, rev1, rev2 = args
101 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
100 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
102 lookup = r.lookup
101 lookup = r.lookup
103 elif len(args) == 2:
102 elif len(args) == 2:
104 if not repo:
103 if not repo:
105 raise error.Abort(_('there is no Mercurial repository here '
104 raise error.Abort(_('there is no Mercurial repository here '
106 '(.hg not found)'))
105 '(.hg not found)'))
107 rev1, rev2 = args
106 rev1, rev2 = args
108 r = repo.changelog
107 r = repo.changelog
109 lookup = repo.lookup
108 lookup = repo.lookup
110 else:
109 else:
111 raise error.Abort(_('either two or three arguments required'))
110 raise error.Abort(_('either two or three arguments required'))
112 a = r.ancestor(lookup(rev1), lookup(rev2))
111 a = r.ancestor(lookup(rev1), lookup(rev2))
113 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
112 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
114
113
115 @command('debugapplystreamclonebundle', [], 'FILE')
114 @command('debugapplystreamclonebundle', [], 'FILE')
116 def debugapplystreamclonebundle(ui, repo, fname):
115 def debugapplystreamclonebundle(ui, repo, fname):
117 """apply a stream clone bundle file"""
116 """apply a stream clone bundle file"""
118 f = hg.openpath(ui, fname)
117 f = hg.openpath(ui, fname)
119 gen = exchange.readbundle(ui, f, fname)
118 gen = exchange.readbundle(ui, f, fname)
120 gen.apply(repo)
119 gen.apply(repo)
121
120
122 @command('debugbuilddag',
121 @command('debugbuilddag',
123 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
122 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
124 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
123 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
125 ('n', 'new-file', None, _('add new file at each rev'))],
124 ('n', 'new-file', None, _('add new file at each rev'))],
126 _('[OPTION]... [TEXT]'))
125 _('[OPTION]... [TEXT]'))
127 def debugbuilddag(ui, repo, text=None,
126 def debugbuilddag(ui, repo, text=None,
128 mergeable_file=False,
127 mergeable_file=False,
129 overwritten_file=False,
128 overwritten_file=False,
130 new_file=False):
129 new_file=False):
131 """builds a repo with a given DAG from scratch in the current empty repo
130 """builds a repo with a given DAG from scratch in the current empty repo
132
131
133 The description of the DAG is read from stdin if not given on the
132 The description of the DAG is read from stdin if not given on the
134 command line.
133 command line.
135
134
136 Elements:
135 Elements:
137
136
138 - "+n" is a linear run of n nodes based on the current default parent
137 - "+n" is a linear run of n nodes based on the current default parent
139 - "." is a single node based on the current default parent
138 - "." is a single node based on the current default parent
140 - "$" resets the default parent to null (implied at the start);
139 - "$" resets the default parent to null (implied at the start);
141 otherwise the default parent is always the last node created
140 otherwise the default parent is always the last node created
142 - "<p" sets the default parent to the backref p
141 - "<p" sets the default parent to the backref p
143 - "*p" is a fork at parent p, which is a backref
142 - "*p" is a fork at parent p, which is a backref
144 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
143 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
145 - "/p2" is a merge of the preceding node and p2
144 - "/p2" is a merge of the preceding node and p2
146 - ":tag" defines a local tag for the preceding node
145 - ":tag" defines a local tag for the preceding node
147 - "@branch" sets the named branch for subsequent nodes
146 - "@branch" sets the named branch for subsequent nodes
148 - "#...\\n" is a comment up to the end of the line
147 - "#...\\n" is a comment up to the end of the line
149
148
150 Whitespace between the above elements is ignored.
149 Whitespace between the above elements is ignored.
151
150
152 A backref is either
151 A backref is either
153
152
154 - a number n, which references the node curr-n, where curr is the current
153 - a number n, which references the node curr-n, where curr is the current
155 node, or
154 node, or
156 - the name of a local tag you placed earlier using ":tag", or
155 - the name of a local tag you placed earlier using ":tag", or
157 - empty to denote the default parent.
156 - empty to denote the default parent.
158
157
159 All string valued-elements are either strictly alphanumeric, or must
158 All string valued-elements are either strictly alphanumeric, or must
160 be enclosed in double quotes ("..."), with "\\" as escape character.
159 be enclosed in double quotes ("..."), with "\\" as escape character.
161 """
160 """
162
161
163 if text is None:
162 if text is None:
164 ui.status(_("reading DAG from stdin\n"))
163 ui.status(_("reading DAG from stdin\n"))
165 text = ui.fin.read()
164 text = ui.fin.read()
166
165
167 cl = repo.changelog
166 cl = repo.changelog
168 if len(cl) > 0:
167 if len(cl) > 0:
169 raise error.Abort(_('repository is not empty'))
168 raise error.Abort(_('repository is not empty'))
170
169
171 # determine number of revs in DAG
170 # determine number of revs in DAG
172 total = 0
171 total = 0
173 for type, data in dagparser.parsedag(text):
172 for type, data in dagparser.parsedag(text):
174 if type == 'n':
173 if type == 'n':
175 total += 1
174 total += 1
176
175
177 if mergeable_file:
176 if mergeable_file:
178 linesperrev = 2
177 linesperrev = 2
179 # make a file with k lines per rev
178 # make a file with k lines per rev
180 initialmergedlines = ['%d' % i
179 initialmergedlines = ['%d' % i
181 for i in pycompat.xrange(0, total * linesperrev)]
180 for i in pycompat.xrange(0, total * linesperrev)]
182 initialmergedlines.append("")
181 initialmergedlines.append("")
183
182
184 tags = []
183 tags = []
185 progress = ui.makeprogress(_('building'), unit=_('revisions'),
184 progress = ui.makeprogress(_('building'), unit=_('revisions'),
186 total=total)
185 total=total)
187 with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
186 with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
188 at = -1
187 at = -1
189 atbranch = 'default'
188 atbranch = 'default'
190 nodeids = []
189 nodeids = []
191 id = 0
190 id = 0
192 progress.update(id)
191 progress.update(id)
193 for type, data in dagparser.parsedag(text):
192 for type, data in dagparser.parsedag(text):
194 if type == 'n':
193 if type == 'n':
195 ui.note(('node %s\n' % pycompat.bytestr(data)))
194 ui.note(('node %s\n' % pycompat.bytestr(data)))
196 id, ps = data
195 id, ps = data
197
196
198 files = []
197 files = []
199 filecontent = {}
198 filecontent = {}
200
199
201 p2 = None
200 p2 = None
202 if mergeable_file:
201 if mergeable_file:
203 fn = "mf"
202 fn = "mf"
204 p1 = repo[ps[0]]
203 p1 = repo[ps[0]]
205 if len(ps) > 1:
204 if len(ps) > 1:
206 p2 = repo[ps[1]]
205 p2 = repo[ps[1]]
207 pa = p1.ancestor(p2)
206 pa = p1.ancestor(p2)
208 base, local, other = [x[fn].data() for x in (pa, p1,
207 base, local, other = [x[fn].data() for x in (pa, p1,
209 p2)]
208 p2)]
210 m3 = simplemerge.Merge3Text(base, local, other)
209 m3 = simplemerge.Merge3Text(base, local, other)
211 ml = [l.strip() for l in m3.merge_lines()]
210 ml = [l.strip() for l in m3.merge_lines()]
212 ml.append("")
211 ml.append("")
213 elif at > 0:
212 elif at > 0:
214 ml = p1[fn].data().split("\n")
213 ml = p1[fn].data().split("\n")
215 else:
214 else:
216 ml = initialmergedlines
215 ml = initialmergedlines
217 ml[id * linesperrev] += " r%i" % id
216 ml[id * linesperrev] += " r%i" % id
218 mergedtext = "\n".join(ml)
217 mergedtext = "\n".join(ml)
219 files.append(fn)
218 files.append(fn)
220 filecontent[fn] = mergedtext
219 filecontent[fn] = mergedtext
221
220
222 if overwritten_file:
221 if overwritten_file:
223 fn = "of"
222 fn = "of"
224 files.append(fn)
223 files.append(fn)
225 filecontent[fn] = "r%i\n" % id
224 filecontent[fn] = "r%i\n" % id
226
225
227 if new_file:
226 if new_file:
228 fn = "nf%i" % id
227 fn = "nf%i" % id
229 files.append(fn)
228 files.append(fn)
230 filecontent[fn] = "r%i\n" % id
229 filecontent[fn] = "r%i\n" % id
231 if len(ps) > 1:
230 if len(ps) > 1:
232 if not p2:
231 if not p2:
233 p2 = repo[ps[1]]
232 p2 = repo[ps[1]]
234 for fn in p2:
233 for fn in p2:
235 if fn.startswith("nf"):
234 if fn.startswith("nf"):
236 files.append(fn)
235 files.append(fn)
237 filecontent[fn] = p2[fn].data()
236 filecontent[fn] = p2[fn].data()
238
237
239 def fctxfn(repo, cx, path):
238 def fctxfn(repo, cx, path):
240 if path in filecontent:
239 if path in filecontent:
241 return context.memfilectx(repo, cx, path,
240 return context.memfilectx(repo, cx, path,
242 filecontent[path])
241 filecontent[path])
243 return None
242 return None
244
243
245 if len(ps) == 0 or ps[0] < 0:
244 if len(ps) == 0 or ps[0] < 0:
246 pars = [None, None]
245 pars = [None, None]
247 elif len(ps) == 1:
246 elif len(ps) == 1:
248 pars = [nodeids[ps[0]], None]
247 pars = [nodeids[ps[0]], None]
249 else:
248 else:
250 pars = [nodeids[p] for p in ps]
249 pars = [nodeids[p] for p in ps]
251 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
250 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
252 date=(id, 0),
251 date=(id, 0),
253 user="debugbuilddag",
252 user="debugbuilddag",
254 extra={'branch': atbranch})
253 extra={'branch': atbranch})
255 nodeid = repo.commitctx(cx)
254 nodeid = repo.commitctx(cx)
256 nodeids.append(nodeid)
255 nodeids.append(nodeid)
257 at = id
256 at = id
258 elif type == 'l':
257 elif type == 'l':
259 id, name = data
258 id, name = data
260 ui.note(('tag %s\n' % name))
259 ui.note(('tag %s\n' % name))
261 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
260 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
262 elif type == 'a':
261 elif type == 'a':
263 ui.note(('branch %s\n' % data))
262 ui.note(('branch %s\n' % data))
264 atbranch = data
263 atbranch = data
265 progress.update(id)
264 progress.update(id)
266
265
267 if tags:
266 if tags:
268 repo.vfs.write("localtags", "".join(tags))
267 repo.vfs.write("localtags", "".join(tags))
269
268
270 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
269 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
271 indent_string = ' ' * indent
270 indent_string = ' ' * indent
272 if all:
271 if all:
273 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
272 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
274 % indent_string)
273 % indent_string)
275
274
276 def showchunks(named):
275 def showchunks(named):
277 ui.write("\n%s%s\n" % (indent_string, named))
276 ui.write("\n%s%s\n" % (indent_string, named))
278 for deltadata in gen.deltaiter():
277 for deltadata in gen.deltaiter():
279 node, p1, p2, cs, deltabase, delta, flags = deltadata
278 node, p1, p2, cs, deltabase, delta, flags = deltadata
280 ui.write("%s%s %s %s %s %s %d\n" %
279 ui.write("%s%s %s %s %s %s %d\n" %
281 (indent_string, hex(node), hex(p1), hex(p2),
280 (indent_string, hex(node), hex(p1), hex(p2),
282 hex(cs), hex(deltabase), len(delta)))
281 hex(cs), hex(deltabase), len(delta)))
283
282
284 chunkdata = gen.changelogheader()
283 chunkdata = gen.changelogheader()
285 showchunks("changelog")
284 showchunks("changelog")
286 chunkdata = gen.manifestheader()
285 chunkdata = gen.manifestheader()
287 showchunks("manifest")
286 showchunks("manifest")
288 for chunkdata in iter(gen.filelogheader, {}):
287 for chunkdata in iter(gen.filelogheader, {}):
289 fname = chunkdata['filename']
288 fname = chunkdata['filename']
290 showchunks(fname)
289 showchunks(fname)
291 else:
290 else:
292 if isinstance(gen, bundle2.unbundle20):
291 if isinstance(gen, bundle2.unbundle20):
293 raise error.Abort(_('use debugbundle2 for this file'))
292 raise error.Abort(_('use debugbundle2 for this file'))
294 chunkdata = gen.changelogheader()
293 chunkdata = gen.changelogheader()
295 for deltadata in gen.deltaiter():
294 for deltadata in gen.deltaiter():
296 node, p1, p2, cs, deltabase, delta, flags = deltadata
295 node, p1, p2, cs, deltabase, delta, flags = deltadata
297 ui.write("%s%s\n" % (indent_string, hex(node)))
296 ui.write("%s%s\n" % (indent_string, hex(node)))
298
297
299 def _debugobsmarkers(ui, part, indent=0, **opts):
298 def _debugobsmarkers(ui, part, indent=0, **opts):
300 """display version and markers contained in 'data'"""
299 """display version and markers contained in 'data'"""
301 opts = pycompat.byteskwargs(opts)
300 opts = pycompat.byteskwargs(opts)
302 data = part.read()
301 data = part.read()
303 indent_string = ' ' * indent
302 indent_string = ' ' * indent
304 try:
303 try:
305 version, markers = obsolete._readmarkers(data)
304 version, markers = obsolete._readmarkers(data)
306 except error.UnknownVersion as exc:
305 except error.UnknownVersion as exc:
307 msg = "%sunsupported version: %s (%d bytes)\n"
306 msg = "%sunsupported version: %s (%d bytes)\n"
308 msg %= indent_string, exc.version, len(data)
307 msg %= indent_string, exc.version, len(data)
309 ui.write(msg)
308 ui.write(msg)
310 else:
309 else:
311 msg = "%sversion: %d (%d bytes)\n"
310 msg = "%sversion: %d (%d bytes)\n"
312 msg %= indent_string, version, len(data)
311 msg %= indent_string, version, len(data)
313 ui.write(msg)
312 ui.write(msg)
314 fm = ui.formatter('debugobsolete', opts)
313 fm = ui.formatter('debugobsolete', opts)
315 for rawmarker in sorted(markers):
314 for rawmarker in sorted(markers):
316 m = obsutil.marker(None, rawmarker)
315 m = obsutil.marker(None, rawmarker)
317 fm.startitem()
316 fm.startitem()
318 fm.plain(indent_string)
317 fm.plain(indent_string)
319 cmdutil.showmarker(fm, m)
318 cmdutil.showmarker(fm, m)
320 fm.end()
319 fm.end()
321
320
322 def _debugphaseheads(ui, data, indent=0):
321 def _debugphaseheads(ui, data, indent=0):
323 """display version and markers contained in 'data'"""
322 """display version and markers contained in 'data'"""
324 indent_string = ' ' * indent
323 indent_string = ' ' * indent
325 headsbyphase = phases.binarydecode(data)
324 headsbyphase = phases.binarydecode(data)
326 for phase in phases.allphases:
325 for phase in phases.allphases:
327 for head in headsbyphase[phase]:
326 for head in headsbyphase[phase]:
328 ui.write(indent_string)
327 ui.write(indent_string)
329 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
328 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
330
329
331 def _quasirepr(thing):
330 def _quasirepr(thing):
332 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
331 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
333 return '{%s}' % (
332 return '{%s}' % (
334 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
333 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
335 return pycompat.bytestr(repr(thing))
334 return pycompat.bytestr(repr(thing))
336
335
337 def _debugbundle2(ui, gen, all=None, **opts):
336 def _debugbundle2(ui, gen, all=None, **opts):
338 """lists the contents of a bundle2"""
337 """lists the contents of a bundle2"""
339 if not isinstance(gen, bundle2.unbundle20):
338 if not isinstance(gen, bundle2.unbundle20):
340 raise error.Abort(_('not a bundle2 file'))
339 raise error.Abort(_('not a bundle2 file'))
341 ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
340 ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
342 parttypes = opts.get(r'part_type', [])
341 parttypes = opts.get(r'part_type', [])
343 for part in gen.iterparts():
342 for part in gen.iterparts():
344 if parttypes and part.type not in parttypes:
343 if parttypes and part.type not in parttypes:
345 continue
344 continue
346 msg = '%s -- %s (mandatory: %r)\n'
345 msg = '%s -- %s (mandatory: %r)\n'
347 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
346 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
348 if part.type == 'changegroup':
347 if part.type == 'changegroup':
349 version = part.params.get('version', '01')
348 version = part.params.get('version', '01')
350 cg = changegroup.getunbundler(version, part, 'UN')
349 cg = changegroup.getunbundler(version, part, 'UN')
351 if not ui.quiet:
350 if not ui.quiet:
352 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
351 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
353 if part.type == 'obsmarkers':
352 if part.type == 'obsmarkers':
354 if not ui.quiet:
353 if not ui.quiet:
355 _debugobsmarkers(ui, part, indent=4, **opts)
354 _debugobsmarkers(ui, part, indent=4, **opts)
356 if part.type == 'phase-heads':
355 if part.type == 'phase-heads':
357 if not ui.quiet:
356 if not ui.quiet:
358 _debugphaseheads(ui, part, indent=4)
357 _debugphaseheads(ui, part, indent=4)
359
358
360 @command('debugbundle',
359 @command('debugbundle',
361 [('a', 'all', None, _('show all details')),
360 [('a', 'all', None, _('show all details')),
362 ('', 'part-type', [], _('show only the named part type')),
361 ('', 'part-type', [], _('show only the named part type')),
363 ('', 'spec', None, _('print the bundlespec of the bundle'))],
362 ('', 'spec', None, _('print the bundlespec of the bundle'))],
364 _('FILE'),
363 _('FILE'),
365 norepo=True)
364 norepo=True)
366 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
365 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
367 """lists the contents of a bundle"""
366 """lists the contents of a bundle"""
368 with hg.openpath(ui, bundlepath) as f:
367 with hg.openpath(ui, bundlepath) as f:
369 if spec:
368 if spec:
370 spec = exchange.getbundlespec(ui, f)
369 spec = exchange.getbundlespec(ui, f)
371 ui.write('%s\n' % spec)
370 ui.write('%s\n' % spec)
372 return
371 return
373
372
374 gen = exchange.readbundle(ui, f, bundlepath)
373 gen = exchange.readbundle(ui, f, bundlepath)
375 if isinstance(gen, bundle2.unbundle20):
374 if isinstance(gen, bundle2.unbundle20):
376 return _debugbundle2(ui, gen, all=all, **opts)
375 return _debugbundle2(ui, gen, all=all, **opts)
377 _debugchangegroup(ui, gen, all=all, **opts)
376 _debugchangegroup(ui, gen, all=all, **opts)
378
377
379 @command('debugcapabilities',
378 @command('debugcapabilities',
380 [], _('PATH'),
379 [], _('PATH'),
381 norepo=True)
380 norepo=True)
382 def debugcapabilities(ui, path, **opts):
381 def debugcapabilities(ui, path, **opts):
383 """lists the capabilities of a remote peer"""
382 """lists the capabilities of a remote peer"""
384 opts = pycompat.byteskwargs(opts)
383 opts = pycompat.byteskwargs(opts)
385 peer = hg.peer(ui, opts, path)
384 peer = hg.peer(ui, opts, path)
386 caps = peer.capabilities()
385 caps = peer.capabilities()
387 ui.write(('Main capabilities:\n'))
386 ui.write(('Main capabilities:\n'))
388 for c in sorted(caps):
387 for c in sorted(caps):
389 ui.write((' %s\n') % c)
388 ui.write((' %s\n') % c)
390 b2caps = bundle2.bundle2caps(peer)
389 b2caps = bundle2.bundle2caps(peer)
391 if b2caps:
390 if b2caps:
392 ui.write(('Bundle2 capabilities:\n'))
391 ui.write(('Bundle2 capabilities:\n'))
393 for key, values in sorted(b2caps.iteritems()):
392 for key, values in sorted(b2caps.iteritems()):
394 ui.write((' %s\n') % key)
393 ui.write((' %s\n') % key)
395 for v in values:
394 for v in values:
396 ui.write((' %s\n') % v)
395 ui.write((' %s\n') % v)
397
396
398 @command('debugcheckstate', [], '')
397 @command('debugcheckstate', [], '')
399 def debugcheckstate(ui, repo):
398 def debugcheckstate(ui, repo):
400 """validate the correctness of the current dirstate"""
399 """validate the correctness of the current dirstate"""
401 parent1, parent2 = repo.dirstate.parents()
400 parent1, parent2 = repo.dirstate.parents()
402 m1 = repo[parent1].manifest()
401 m1 = repo[parent1].manifest()
403 m2 = repo[parent2].manifest()
402 m2 = repo[parent2].manifest()
404 errors = 0
403 errors = 0
405 for f in repo.dirstate:
404 for f in repo.dirstate:
406 state = repo.dirstate[f]
405 state = repo.dirstate[f]
407 if state in "nr" and f not in m1:
406 if state in "nr" and f not in m1:
408 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
407 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
409 errors += 1
408 errors += 1
410 if state in "a" and f in m1:
409 if state in "a" and f in m1:
411 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
410 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
412 errors += 1
411 errors += 1
413 if state in "m" and f not in m1 and f not in m2:
412 if state in "m" and f not in m1 and f not in m2:
414 ui.warn(_("%s in state %s, but not in either manifest\n") %
413 ui.warn(_("%s in state %s, but not in either manifest\n") %
415 (f, state))
414 (f, state))
416 errors += 1
415 errors += 1
417 for f in m1:
416 for f in m1:
418 state = repo.dirstate[f]
417 state = repo.dirstate[f]
419 if state not in "nrm":
418 if state not in "nrm":
420 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
419 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
421 errors += 1
420 errors += 1
422 if errors:
421 if errors:
423 error = _(".hg/dirstate inconsistent with current parent's manifest")
422 error = _(".hg/dirstate inconsistent with current parent's manifest")
424 raise error.Abort(error)
423 raise error.Abort(error)
425
424
426 @command('debugcolor',
425 @command('debugcolor',
427 [('', 'style', None, _('show all configured styles'))],
426 [('', 'style', None, _('show all configured styles'))],
428 'hg debugcolor')
427 'hg debugcolor')
429 def debugcolor(ui, repo, **opts):
428 def debugcolor(ui, repo, **opts):
430 """show available color, effects or style"""
429 """show available color, effects or style"""
431 ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
430 ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
432 if opts.get(r'style'):
431 if opts.get(r'style'):
433 return _debugdisplaystyle(ui)
432 return _debugdisplaystyle(ui)
434 else:
433 else:
435 return _debugdisplaycolor(ui)
434 return _debugdisplaycolor(ui)
436
435
437 def _debugdisplaycolor(ui):
436 def _debugdisplaycolor(ui):
438 ui = ui.copy()
437 ui = ui.copy()
439 ui._styles.clear()
438 ui._styles.clear()
440 for effect in color._activeeffects(ui).keys():
439 for effect in color._activeeffects(ui).keys():
441 ui._styles[effect] = effect
440 ui._styles[effect] = effect
442 if ui._terminfoparams:
441 if ui._terminfoparams:
443 for k, v in ui.configitems('color'):
442 for k, v in ui.configitems('color'):
444 if k.startswith('color.'):
443 if k.startswith('color.'):
445 ui._styles[k] = k[6:]
444 ui._styles[k] = k[6:]
446 elif k.startswith('terminfo.'):
445 elif k.startswith('terminfo.'):
447 ui._styles[k] = k[9:]
446 ui._styles[k] = k[9:]
448 ui.write(_('available colors:\n'))
447 ui.write(_('available colors:\n'))
449 # sort label with a '_' after the other to group '_background' entry.
448 # sort label with a '_' after the other to group '_background' entry.
450 items = sorted(ui._styles.items(),
449 items = sorted(ui._styles.items(),
451 key=lambda i: ('_' in i[0], i[0], i[1]))
450 key=lambda i: ('_' in i[0], i[0], i[1]))
452 for colorname, label in items:
451 for colorname, label in items:
453 ui.write(('%s\n') % colorname, label=label)
452 ui.write(('%s\n') % colorname, label=label)
454
453
455 def _debugdisplaystyle(ui):
454 def _debugdisplaystyle(ui):
456 ui.write(_('available style:\n'))
455 ui.write(_('available style:\n'))
457 if not ui._styles:
456 if not ui._styles:
458 return
457 return
459 width = max(len(s) for s in ui._styles)
458 width = max(len(s) for s in ui._styles)
460 for label, effects in sorted(ui._styles.items()):
459 for label, effects in sorted(ui._styles.items()):
461 ui.write('%s' % label, label=label)
460 ui.write('%s' % label, label=label)
462 if effects:
461 if effects:
463 # 50
462 # 50
464 ui.write(': ')
463 ui.write(': ')
465 ui.write(' ' * (max(0, width - len(label))))
464 ui.write(' ' * (max(0, width - len(label))))
466 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
465 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
467 ui.write('\n')
466 ui.write('\n')
468
467
469 @command('debugcreatestreamclonebundle', [], 'FILE')
468 @command('debugcreatestreamclonebundle', [], 'FILE')
470 def debugcreatestreamclonebundle(ui, repo, fname):
469 def debugcreatestreamclonebundle(ui, repo, fname):
471 """create a stream clone bundle file
470 """create a stream clone bundle file
472
471
473 Stream bundles are special bundles that are essentially archives of
472 Stream bundles are special bundles that are essentially archives of
474 revlog files. They are commonly used for cloning very quickly.
473 revlog files. They are commonly used for cloning very quickly.
475 """
474 """
476 # TODO we may want to turn this into an abort when this functionality
475 # TODO we may want to turn this into an abort when this functionality
477 # is moved into `hg bundle`.
476 # is moved into `hg bundle`.
478 if phases.hassecret(repo):
477 if phases.hassecret(repo):
479 ui.warn(_('(warning: stream clone bundle will contain secret '
478 ui.warn(_('(warning: stream clone bundle will contain secret '
480 'revisions)\n'))
479 'revisions)\n'))
481
480
482 requirements, gen = streamclone.generatebundlev1(repo)
481 requirements, gen = streamclone.generatebundlev1(repo)
483 changegroup.writechunks(ui, gen, fname)
482 changegroup.writechunks(ui, gen, fname)
484
483
485 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
484 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
486
485
487 @command('debugdag',
486 @command('debugdag',
488 [('t', 'tags', None, _('use tags as labels')),
487 [('t', 'tags', None, _('use tags as labels')),
489 ('b', 'branches', None, _('annotate with branch names')),
488 ('b', 'branches', None, _('annotate with branch names')),
490 ('', 'dots', None, _('use dots for runs')),
489 ('', 'dots', None, _('use dots for runs')),
491 ('s', 'spaces', None, _('separate elements by spaces'))],
490 ('s', 'spaces', None, _('separate elements by spaces'))],
492 _('[OPTION]... [FILE [REV]...]'),
491 _('[OPTION]... [FILE [REV]...]'),
493 optionalrepo=True)
492 optionalrepo=True)
494 def debugdag(ui, repo, file_=None, *revs, **opts):
493 def debugdag(ui, repo, file_=None, *revs, **opts):
495 """format the changelog or an index DAG as a concise textual description
494 """format the changelog or an index DAG as a concise textual description
496
495
497 If you pass a revlog index, the revlog's DAG is emitted. If you list
496 If you pass a revlog index, the revlog's DAG is emitted. If you list
498 revision numbers, they get labeled in the output as rN.
497 revision numbers, they get labeled in the output as rN.
499
498
500 Otherwise, the changelog DAG of the current repo is emitted.
499 Otherwise, the changelog DAG of the current repo is emitted.
501 """
500 """
502 spaces = opts.get(r'spaces')
501 spaces = opts.get(r'spaces')
503 dots = opts.get(r'dots')
502 dots = opts.get(r'dots')
504 if file_:
503 if file_:
505 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
504 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
506 file_)
505 file_)
507 revs = set((int(r) for r in revs))
506 revs = set((int(r) for r in revs))
508 def events():
507 def events():
509 for r in rlog:
508 for r in rlog:
510 yield 'n', (r, list(p for p in rlog.parentrevs(r)
509 yield 'n', (r, list(p for p in rlog.parentrevs(r)
511 if p != -1))
510 if p != -1))
512 if r in revs:
511 if r in revs:
513 yield 'l', (r, "r%i" % r)
512 yield 'l', (r, "r%i" % r)
514 elif repo:
513 elif repo:
515 cl = repo.changelog
514 cl = repo.changelog
516 tags = opts.get(r'tags')
515 tags = opts.get(r'tags')
517 branches = opts.get(r'branches')
516 branches = opts.get(r'branches')
518 if tags:
517 if tags:
519 labels = {}
518 labels = {}
520 for l, n in repo.tags().items():
519 for l, n in repo.tags().items():
521 labels.setdefault(cl.rev(n), []).append(l)
520 labels.setdefault(cl.rev(n), []).append(l)
522 def events():
521 def events():
523 b = "default"
522 b = "default"
524 for r in cl:
523 for r in cl:
525 if branches:
524 if branches:
526 newb = cl.read(cl.node(r))[5]['branch']
525 newb = cl.read(cl.node(r))[5]['branch']
527 if newb != b:
526 if newb != b:
528 yield 'a', newb
527 yield 'a', newb
529 b = newb
528 b = newb
530 yield 'n', (r, list(p for p in cl.parentrevs(r)
529 yield 'n', (r, list(p for p in cl.parentrevs(r)
531 if p != -1))
530 if p != -1))
532 if tags:
531 if tags:
533 ls = labels.get(r)
532 ls = labels.get(r)
534 if ls:
533 if ls:
535 for l in ls:
534 for l in ls:
536 yield 'l', (r, l)
535 yield 'l', (r, l)
537 else:
536 else:
538 raise error.Abort(_('need repo for changelog dag'))
537 raise error.Abort(_('need repo for changelog dag'))
539
538
540 for line in dagparser.dagtextlines(events(),
539 for line in dagparser.dagtextlines(events(),
541 addspaces=spaces,
540 addspaces=spaces,
542 wraplabels=True,
541 wraplabels=True,
543 wrapannotations=True,
542 wrapannotations=True,
544 wrapnonlinear=dots,
543 wrapnonlinear=dots,
545 usedots=dots,
544 usedots=dots,
546 maxlinewidth=70):
545 maxlinewidth=70):
547 ui.write(line)
546 ui.write(line)
548 ui.write("\n")
547 ui.write("\n")
549
548
550 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
549 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
551 def debugdata(ui, repo, file_, rev=None, **opts):
550 def debugdata(ui, repo, file_, rev=None, **opts):
552 """dump the contents of a data file revision"""
551 """dump the contents of a data file revision"""
553 opts = pycompat.byteskwargs(opts)
552 opts = pycompat.byteskwargs(opts)
554 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
553 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
555 if rev is not None:
554 if rev is not None:
556 raise error.CommandError('debugdata', _('invalid arguments'))
555 raise error.CommandError('debugdata', _('invalid arguments'))
557 file_, rev = None, file_
556 file_, rev = None, file_
558 elif rev is None:
557 elif rev is None:
559 raise error.CommandError('debugdata', _('invalid arguments'))
558 raise error.CommandError('debugdata', _('invalid arguments'))
560 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
559 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
561 try:
560 try:
562 ui.write(r.revision(r.lookup(rev), raw=True))
561 ui.write(r.revision(r.lookup(rev), raw=True))
563 except KeyError:
562 except KeyError:
564 raise error.Abort(_('invalid revision identifier %s') % rev)
563 raise error.Abort(_('invalid revision identifier %s') % rev)
565
564
566 @command('debugdate',
565 @command('debugdate',
567 [('e', 'extended', None, _('try extended date formats'))],
566 [('e', 'extended', None, _('try extended date formats'))],
568 _('[-e] DATE [RANGE]'),
567 _('[-e] DATE [RANGE]'),
569 norepo=True, optionalrepo=True)
568 norepo=True, optionalrepo=True)
570 def debugdate(ui, date, range=None, **opts):
569 def debugdate(ui, date, range=None, **opts):
571 """parse and display a date"""
570 """parse and display a date"""
572 if opts[r"extended"]:
571 if opts[r"extended"]:
573 d = dateutil.parsedate(date, util.extendeddateformats)
572 d = dateutil.parsedate(date, util.extendeddateformats)
574 else:
573 else:
575 d = dateutil.parsedate(date)
574 d = dateutil.parsedate(date)
576 ui.write(("internal: %d %d\n") % d)
575 ui.write(("internal: %d %d\n") % d)
577 ui.write(("standard: %s\n") % dateutil.datestr(d))
576 ui.write(("standard: %s\n") % dateutil.datestr(d))
578 if range:
577 if range:
579 m = dateutil.matchdate(range)
578 m = dateutil.matchdate(range)
580 ui.write(("match: %s\n") % m(d[0]))
579 ui.write(("match: %s\n") % m(d[0]))
581
580
582 @command('debugdeltachain',
581 @command('debugdeltachain',
583 cmdutil.debugrevlogopts + cmdutil.formatteropts,
582 cmdutil.debugrevlogopts + cmdutil.formatteropts,
584 _('-c|-m|FILE'),
583 _('-c|-m|FILE'),
585 optionalrepo=True)
584 optionalrepo=True)
586 def debugdeltachain(ui, repo, file_=None, **opts):
585 def debugdeltachain(ui, repo, file_=None, **opts):
587 """dump information about delta chains in a revlog
586 """dump information about delta chains in a revlog
588
587
589 Output can be templatized. Available template keywords are:
588 Output can be templatized. Available template keywords are:
590
589
591 :``rev``: revision number
590 :``rev``: revision number
592 :``chainid``: delta chain identifier (numbered by unique base)
591 :``chainid``: delta chain identifier (numbered by unique base)
593 :``chainlen``: delta chain length to this revision
592 :``chainlen``: delta chain length to this revision
594 :``prevrev``: previous revision in delta chain
593 :``prevrev``: previous revision in delta chain
595 :``deltatype``: role of delta / how it was computed
594 :``deltatype``: role of delta / how it was computed
596 :``compsize``: compressed size of revision
595 :``compsize``: compressed size of revision
597 :``uncompsize``: uncompressed size of revision
596 :``uncompsize``: uncompressed size of revision
598 :``chainsize``: total size of compressed revisions in chain
597 :``chainsize``: total size of compressed revisions in chain
599 :``chainratio``: total chain size divided by uncompressed revision size
598 :``chainratio``: total chain size divided by uncompressed revision size
600 (new delta chains typically start at ratio 2.00)
599 (new delta chains typically start at ratio 2.00)
601 :``lindist``: linear distance from base revision in delta chain to end
600 :``lindist``: linear distance from base revision in delta chain to end
602 of this revision
601 of this revision
603 :``extradist``: total size of revisions not part of this delta chain from
602 :``extradist``: total size of revisions not part of this delta chain from
604 base of delta chain to end of this revision; a measurement
603 base of delta chain to end of this revision; a measurement
605 of how much extra data we need to read/seek across to read
604 of how much extra data we need to read/seek across to read
606 the delta chain for this revision
605 the delta chain for this revision
607 :``extraratio``: extradist divided by chainsize; another representation of
606 :``extraratio``: extradist divided by chainsize; another representation of
608 how much unrelated data is needed to load this delta chain
607 how much unrelated data is needed to load this delta chain
609
608
610 If the repository is configured to use the sparse read, additional keywords
609 If the repository is configured to use the sparse read, additional keywords
611 are available:
610 are available:
612
611
613 :``readsize``: total size of data read from the disk for a revision
612 :``readsize``: total size of data read from the disk for a revision
614 (sum of the sizes of all the blocks)
613 (sum of the sizes of all the blocks)
615 :``largestblock``: size of the largest block of data read from the disk
614 :``largestblock``: size of the largest block of data read from the disk
616 :``readdensity``: density of useful bytes in the data read from the disk
615 :``readdensity``: density of useful bytes in the data read from the disk
617 :``srchunks``: in how many data hunks the whole revision would be read
616 :``srchunks``: in how many data hunks the whole revision would be read
618
617
619 The sparse read can be enabled with experimental.sparse-read = True
618 The sparse read can be enabled with experimental.sparse-read = True
620 """
619 """
621 opts = pycompat.byteskwargs(opts)
620 opts = pycompat.byteskwargs(opts)
622 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
621 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
623 index = r.index
622 index = r.index
624 start = r.start
623 start = r.start
625 length = r.length
624 length = r.length
626 generaldelta = r.version & revlog.FLAG_GENERALDELTA
625 generaldelta = r.version & revlog.FLAG_GENERALDELTA
627 withsparseread = getattr(r, '_withsparseread', False)
626 withsparseread = getattr(r, '_withsparseread', False)
628
627
629 def revinfo(rev):
628 def revinfo(rev):
630 e = index[rev]
629 e = index[rev]
631 compsize = e[1]
630 compsize = e[1]
632 uncompsize = e[2]
631 uncompsize = e[2]
633 chainsize = 0
632 chainsize = 0
634
633
635 if generaldelta:
634 if generaldelta:
636 if e[3] == e[5]:
635 if e[3] == e[5]:
637 deltatype = 'p1'
636 deltatype = 'p1'
638 elif e[3] == e[6]:
637 elif e[3] == e[6]:
639 deltatype = 'p2'
638 deltatype = 'p2'
640 elif e[3] == rev - 1:
639 elif e[3] == rev - 1:
641 deltatype = 'prev'
640 deltatype = 'prev'
642 elif e[3] == rev:
641 elif e[3] == rev:
643 deltatype = 'base'
642 deltatype = 'base'
644 else:
643 else:
645 deltatype = 'other'
644 deltatype = 'other'
646 else:
645 else:
647 if e[3] == rev:
646 if e[3] == rev:
648 deltatype = 'base'
647 deltatype = 'base'
649 else:
648 else:
650 deltatype = 'prev'
649 deltatype = 'prev'
651
650
652 chain = r._deltachain(rev)[0]
651 chain = r._deltachain(rev)[0]
653 for iterrev in chain:
652 for iterrev in chain:
654 e = index[iterrev]
653 e = index[iterrev]
655 chainsize += e[1]
654 chainsize += e[1]
656
655
657 return compsize, uncompsize, deltatype, chain, chainsize
656 return compsize, uncompsize, deltatype, chain, chainsize
658
657
659 fm = ui.formatter('debugdeltachain', opts)
658 fm = ui.formatter('debugdeltachain', opts)
660
659
661 fm.plain(' rev chain# chainlen prev delta '
660 fm.plain(' rev chain# chainlen prev delta '
662 'size rawsize chainsize ratio lindist extradist '
661 'size rawsize chainsize ratio lindist extradist '
663 'extraratio')
662 'extraratio')
664 if withsparseread:
663 if withsparseread:
665 fm.plain(' readsize largestblk rddensity srchunks')
664 fm.plain(' readsize largestblk rddensity srchunks')
666 fm.plain('\n')
665 fm.plain('\n')
667
666
668 chainbases = {}
667 chainbases = {}
669 for rev in r:
668 for rev in r:
670 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
669 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
671 chainbase = chain[0]
670 chainbase = chain[0]
672 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
671 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
673 basestart = start(chainbase)
672 basestart = start(chainbase)
674 revstart = start(rev)
673 revstart = start(rev)
675 lineardist = revstart + comp - basestart
674 lineardist = revstart + comp - basestart
676 extradist = lineardist - chainsize
675 extradist = lineardist - chainsize
677 try:
676 try:
678 prevrev = chain[-2]
677 prevrev = chain[-2]
679 except IndexError:
678 except IndexError:
680 prevrev = -1
679 prevrev = -1
681
680
682 if uncomp != 0:
681 if uncomp != 0:
683 chainratio = float(chainsize) / float(uncomp)
682 chainratio = float(chainsize) / float(uncomp)
684 else:
683 else:
685 chainratio = chainsize
684 chainratio = chainsize
686
685
687 if chainsize != 0:
686 if chainsize != 0:
688 extraratio = float(extradist) / float(chainsize)
687 extraratio = float(extradist) / float(chainsize)
689 else:
688 else:
690 extraratio = extradist
689 extraratio = extradist
691
690
692 fm.startitem()
691 fm.startitem()
693 fm.write('rev chainid chainlen prevrev deltatype compsize '
692 fm.write('rev chainid chainlen prevrev deltatype compsize '
694 'uncompsize chainsize chainratio lindist extradist '
693 'uncompsize chainsize chainratio lindist extradist '
695 'extraratio',
694 'extraratio',
696 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
695 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
697 rev, chainid, len(chain), prevrev, deltatype, comp,
696 rev, chainid, len(chain), prevrev, deltatype, comp,
698 uncomp, chainsize, chainratio, lineardist, extradist,
697 uncomp, chainsize, chainratio, lineardist, extradist,
699 extraratio,
698 extraratio,
700 rev=rev, chainid=chainid, chainlen=len(chain),
699 rev=rev, chainid=chainid, chainlen=len(chain),
701 prevrev=prevrev, deltatype=deltatype, compsize=comp,
700 prevrev=prevrev, deltatype=deltatype, compsize=comp,
702 uncompsize=uncomp, chainsize=chainsize,
701 uncompsize=uncomp, chainsize=chainsize,
703 chainratio=chainratio, lindist=lineardist,
702 chainratio=chainratio, lindist=lineardist,
704 extradist=extradist, extraratio=extraratio)
703 extradist=extradist, extraratio=extraratio)
705 if withsparseread:
704 if withsparseread:
706 readsize = 0
705 readsize = 0
707 largestblock = 0
706 largestblock = 0
708 srchunks = 0
707 srchunks = 0
709
708
710 for revschunk in revlog._slicechunk(r, chain):
709 for revschunk in revlog._slicechunk(r, chain):
711 srchunks += 1
710 srchunks += 1
712 blkend = start(revschunk[-1]) + length(revschunk[-1])
711 blkend = start(revschunk[-1]) + length(revschunk[-1])
713 blksize = blkend - start(revschunk[0])
712 blksize = blkend - start(revschunk[0])
714
713
715 readsize += blksize
714 readsize += blksize
716 if largestblock < blksize:
715 if largestblock < blksize:
717 largestblock = blksize
716 largestblock = blksize
718
717
719 if readsize:
718 if readsize:
720 readdensity = float(chainsize) / float(readsize)
719 readdensity = float(chainsize) / float(readsize)
721 else:
720 else:
722 readdensity = 1
721 readdensity = 1
723
722
724 fm.write('readsize largestblock readdensity srchunks',
723 fm.write('readsize largestblock readdensity srchunks',
725 ' %10d %10d %9.5f %8d',
724 ' %10d %10d %9.5f %8d',
726 readsize, largestblock, readdensity, srchunks,
725 readsize, largestblock, readdensity, srchunks,
727 readsize=readsize, largestblock=largestblock,
726 readsize=readsize, largestblock=largestblock,
728 readdensity=readdensity, srchunks=srchunks)
727 readdensity=readdensity, srchunks=srchunks)
729
728
730 fm.plain('\n')
729 fm.plain('\n')
731
730
732 fm.end()
731 fm.end()
733
732
734 @command('debugdirstate|debugstate',
733 @command('debugdirstate|debugstate',
735 [('', 'nodates', None, _('do not display the saved mtime')),
734 [('', 'nodates', None, _('do not display the saved mtime')),
736 ('', 'datesort', None, _('sort by saved mtime'))],
735 ('', 'datesort', None, _('sort by saved mtime'))],
737 _('[OPTION]...'))
736 _('[OPTION]...'))
738 def debugstate(ui, repo, **opts):
737 def debugstate(ui, repo, **opts):
739 """show the contents of the current dirstate"""
738 """show the contents of the current dirstate"""
740
739
741 nodates = opts.get(r'nodates')
740 nodates = opts.get(r'nodates')
742 datesort = opts.get(r'datesort')
741 datesort = opts.get(r'datesort')
743
742
744 timestr = ""
743 timestr = ""
745 if datesort:
744 if datesort:
746 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
745 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
747 else:
746 else:
748 keyfunc = None # sort by filename
747 keyfunc = None # sort by filename
749 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
748 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
750 if ent[3] == -1:
749 if ent[3] == -1:
751 timestr = 'unset '
750 timestr = 'unset '
752 elif nodates:
751 elif nodates:
753 timestr = 'set '
752 timestr = 'set '
754 else:
753 else:
755 timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
754 timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
756 time.localtime(ent[3]))
755 time.localtime(ent[3]))
757 timestr = encoding.strtolocal(timestr)
756 timestr = encoding.strtolocal(timestr)
758 if ent[1] & 0o20000:
757 if ent[1] & 0o20000:
759 mode = 'lnk'
758 mode = 'lnk'
760 else:
759 else:
761 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
760 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
762 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
761 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
763 for f in repo.dirstate.copies():
762 for f in repo.dirstate.copies():
764 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
763 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
765
764
766 @command('debugdiscovery',
765 @command('debugdiscovery',
767 [('', 'old', None, _('use old-style discovery')),
766 [('', 'old', None, _('use old-style discovery')),
768 ('', 'nonheads', None,
767 ('', 'nonheads', None,
769 _('use old-style discovery with non-heads included')),
768 _('use old-style discovery with non-heads included')),
770 ('', 'rev', [], 'restrict discovery to this set of revs'),
769 ('', 'rev', [], 'restrict discovery to this set of revs'),
771 ] + cmdutil.remoteopts,
770 ] + cmdutil.remoteopts,
772 _('[--rev REV] [OTHER]'))
771 _('[--rev REV] [OTHER]'))
773 def debugdiscovery(ui, repo, remoteurl="default", **opts):
772 def debugdiscovery(ui, repo, remoteurl="default", **opts):
774 """runs the changeset discovery protocol in isolation"""
773 """runs the changeset discovery protocol in isolation"""
775 opts = pycompat.byteskwargs(opts)
774 opts = pycompat.byteskwargs(opts)
776 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
775 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
777 remote = hg.peer(repo, opts, remoteurl)
776 remote = hg.peer(repo, opts, remoteurl)
778 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
777 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
779
778
780 # make sure tests are repeatable
779 # make sure tests are repeatable
781 random.seed(12323)
780 random.seed(12323)
782
781
783 def doit(pushedrevs, remoteheads, remote=remote):
782 def doit(pushedrevs, remoteheads, remote=remote):
784 if opts.get('old'):
783 if opts.get('old'):
785 if not util.safehasattr(remote, 'branches'):
784 if not util.safehasattr(remote, 'branches'):
786 # enable in-client legacy support
785 # enable in-client legacy support
787 remote = localrepo.locallegacypeer(remote.local())
786 remote = localrepo.locallegacypeer(remote.local())
788 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
787 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
789 force=True)
788 force=True)
790 common = set(common)
789 common = set(common)
791 if not opts.get('nonheads'):
790 if not opts.get('nonheads'):
792 ui.write(("unpruned common: %s\n") %
791 ui.write(("unpruned common: %s\n") %
793 " ".join(sorted(short(n) for n in common)))
792 " ".join(sorted(short(n) for n in common)))
- 794 cl = repo.changelog
- 795 clnode = cl.node
- 796 dag = dagutil.revlogdag(cl)
- 797 all = dag.ancestorset(cl.rev(n) for n in common)
- 798 common = {clnode(r) for r in dag.headsetofconnecteds(all)}
+ 793
+ 794 clnode = repo.changelog.node
+ 795 common = repo.revs('heads(::%ln)', common)
+ 796 common = {clnode(r) for r in common}
799 else:
797 else:
800 nodes = None
798 nodes = None
801 if pushedrevs:
799 if pushedrevs:
802 revs = scmutil.revrange(repo, pushedrevs)
800 revs = scmutil.revrange(repo, pushedrevs)
803 nodes = [repo[r].node() for r in revs]
801 nodes = [repo[r].node() for r in revs]
804 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
802 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
805 ancestorsof=nodes)
803 ancestorsof=nodes)
806 common = set(common)
804 common = set(common)
807 rheads = set(hds)
805 rheads = set(hds)
808 lheads = set(repo.heads())
806 lheads = set(repo.heads())
809 ui.write(("common heads: %s\n") %
807 ui.write(("common heads: %s\n") %
810 " ".join(sorted(short(n) for n in common)))
808 " ".join(sorted(short(n) for n in common)))
811 if lheads <= common:
809 if lheads <= common:
812 ui.write(("local is subset\n"))
810 ui.write(("local is subset\n"))
813 elif rheads <= common:
811 elif rheads <= common:
814 ui.write(("remote is subset\n"))
812 ui.write(("remote is subset\n"))
815
813
816 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
814 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
817 localrevs = opts['rev']
815 localrevs = opts['rev']
818 doit(localrevs, remoterevs)
816 doit(localrevs, remoterevs)
819
817
820 _chunksize = 4 << 10
818 _chunksize = 4 << 10
821
819
822 @command('debugdownload',
820 @command('debugdownload',
823 [
821 [
824 ('o', 'output', '', _('path')),
822 ('o', 'output', '', _('path')),
825 ],
823 ],
826 optionalrepo=True)
824 optionalrepo=True)
827 def debugdownload(ui, repo, url, output=None, **opts):
825 def debugdownload(ui, repo, url, output=None, **opts):
828 """download a resource using Mercurial logic and config
826 """download a resource using Mercurial logic and config
829 """
827 """
830 fh = urlmod.open(ui, url, output)
828 fh = urlmod.open(ui, url, output)
831
829
832 dest = ui
830 dest = ui
833 if output:
831 if output:
834 dest = open(output, "wb", _chunksize)
832 dest = open(output, "wb", _chunksize)
835 try:
833 try:
836 data = fh.read(_chunksize)
834 data = fh.read(_chunksize)
837 while data:
835 while data:
838 dest.write(data)
836 dest.write(data)
839 data = fh.read(_chunksize)
837 data = fh.read(_chunksize)
840 finally:
838 finally:
841 if output:
839 if output:
842 dest.close()
840 dest.close()
843
841
844 @command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
842 @command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
845 def debugextensions(ui, repo, **opts):
843 def debugextensions(ui, repo, **opts):
846 '''show information about active extensions'''
844 '''show information about active extensions'''
847 opts = pycompat.byteskwargs(opts)
845 opts = pycompat.byteskwargs(opts)
848 exts = extensions.extensions(ui)
846 exts = extensions.extensions(ui)
849 hgver = util.version()
847 hgver = util.version()
850 fm = ui.formatter('debugextensions', opts)
848 fm = ui.formatter('debugextensions', opts)
851 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
849 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
852 isinternal = extensions.ismoduleinternal(extmod)
850 isinternal = extensions.ismoduleinternal(extmod)
853 extsource = pycompat.fsencode(extmod.__file__)
851 extsource = pycompat.fsencode(extmod.__file__)
854 if isinternal:
852 if isinternal:
855 exttestedwith = [] # never expose magic string to users
853 exttestedwith = [] # never expose magic string to users
856 else:
854 else:
857 exttestedwith = getattr(extmod, 'testedwith', '').split()
855 exttestedwith = getattr(extmod, 'testedwith', '').split()
858 extbuglink = getattr(extmod, 'buglink', None)
856 extbuglink = getattr(extmod, 'buglink', None)
859
857
860 fm.startitem()
858 fm.startitem()
861
859
862 if ui.quiet or ui.verbose:
860 if ui.quiet or ui.verbose:
863 fm.write('name', '%s\n', extname)
861 fm.write('name', '%s\n', extname)
864 else:
862 else:
865 fm.write('name', '%s', extname)
863 fm.write('name', '%s', extname)
866 if isinternal or hgver in exttestedwith:
864 if isinternal or hgver in exttestedwith:
867 fm.plain('\n')
865 fm.plain('\n')
868 elif not exttestedwith:
866 elif not exttestedwith:
869 fm.plain(_(' (untested!)\n'))
867 fm.plain(_(' (untested!)\n'))
870 else:
868 else:
871 lasttestedversion = exttestedwith[-1]
869 lasttestedversion = exttestedwith[-1]
872 fm.plain(' (%s!)\n' % lasttestedversion)
870 fm.plain(' (%s!)\n' % lasttestedversion)
873
871
874 fm.condwrite(ui.verbose and extsource, 'source',
872 fm.condwrite(ui.verbose and extsource, 'source',
875 _(' location: %s\n'), extsource or "")
873 _(' location: %s\n'), extsource or "")
876
874
877 if ui.verbose:
875 if ui.verbose:
878 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
876 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
879 fm.data(bundled=isinternal)
877 fm.data(bundled=isinternal)
880
878
881 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
879 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
882 _(' tested with: %s\n'),
880 _(' tested with: %s\n'),
883 fm.formatlist(exttestedwith, name='ver'))
881 fm.formatlist(exttestedwith, name='ver'))
884
882
885 fm.condwrite(ui.verbose and extbuglink, 'buglink',
883 fm.condwrite(ui.verbose and extbuglink, 'buglink',
886 _(' bug reporting: %s\n'), extbuglink or "")
884 _(' bug reporting: %s\n'), extbuglink or "")
887
885
888 fm.end()
886 fm.end()
889
887
@command('debugfileset',
    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
     ('', 'all-files', False,
      _('test files from all revisions and working directory')),
     ('s', 'show-matcher', None,
      _('print internal representation of matcher')),
     ('p', 'show-stage', [],
      _('print parsed tree at the given stage'), _('NAME'))],
    _('[-r REV] [--all-files] [OPTION]... FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset
    fileset.symbols # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)

    stages = [
        ('parsed', pycompat.identity),
        ('analyzed', filesetlang.analyze),
        ('optimized', filesetlang.optimize),
    ]
    stagenames = set(n for n, f in stages)

    showalways = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(filesetlang.prettyformat(tree), "\n")

    files = set()
    if opts['all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts['all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(repo.dirstate.walk(scmutil.matchall(repo),
                                        subrepos=list(wctx.substate),
                                        unknown=True, ignored=True))
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(expr)
    if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
        ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write("%s\n" % f)

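# Example for the debugfileset command above (illustrative): show every parser
# stage and then the files matched by a fileset expression in the working copy.
#   hg debugfileset -p all 'added() or modified()'
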
@command('debugformat',
         [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len('format-variant'), maxvariantlength)

    def makeformatname(name):
        return '%s:' + (' ' * (maxvariantlength - len(name)))

    fm = ui.formatter('debugformat', opts)
    if fm.isplain():
        def formatvalue(value):
            if util.safehasattr(value, 'startswith'):
                return value
            if value:
                return 'yes'
            else:
                return 'no'
    else:
        formatvalue = pycompat.identity

    fm.plain('format-variant')
    fm.plain(' ' * (maxvariantlength - len('format-variant')))
    fm.plain(' repo')
    if ui.verbose:
        fm.plain(' config default')
    fm.plain('\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        if repovalue != configvalue:
            namelabel = 'formatvariant.name.mismatchconfig'
            repolabel = 'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = 'formatvariant.name.mismatchdefault'
            repolabel = 'formatvariant.repo.mismatchdefault'
        else:
            namelabel = 'formatvariant.name.uptodate'
            repolabel = 'formatvariant.repo.uptodate'

        fm.write('name', makeformatname(fv.name), fv.name,
                 label=namelabel)
        fm.write('repo', ' %3s', formatvalue(repovalue),
                 label=repolabel)
        if fv.default != configvalue:
            configlabel = 'formatvariant.config.special'
        else:
            configlabel = 'formatvariant.config.default'
        fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
                     label=configlabel)
        fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
                     label='formatvariant.default')
        fm.plain('\n')
    fm.end()

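# Example for the debugformat command above (illustrative): compare the
# repository's format variants against the current config and Mercurial's
# defaults; --verbose adds the "config" and "default" columns.
#   hg debugformat --verbose
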
@command('debugfsinfo', [], _('[PATH]'), norepo=True)
def debugfsinfo(ui, path="."):
    """show information detected about current filesystem"""
    ui.write(('path: %s\n') % path)
    ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
    ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
    ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
    ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
    ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
    casesensitive = '(unknown)'
    try:
        with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
            casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
    except OSError:
        pass
    ui.write(('case-sensitive: %s\n') % casesensitive)

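# Example for the debugfsinfo command above (illustrative; the path is a
# placeholder): probe exec-bit, symlink, hardlink and case-sensitivity support
# of the filesystem holding a directory, defaulting to the current one.
#   hg debugfsinfo /path/to/checkout
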
@command('debuggetbundle',
    [('H', 'head', [], _('id of head node'), _('ID')),
     ('C', 'common', [], _('id of common node'), _('ID')),
     ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
    _('REPO FILE [-H|-C ID]...'),
    norepo=True)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('getbundle'):
        raise error.Abort("getbundle() not supported by target repository")
    args = {}
    if common:
        args[r'common'] = [bin(s) for s in common]
    if head:
        args[r'heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args[r'bundlecaps'] = None
    bundle = repo.getbundle('debug', **args)

    bundletype = opts.get('type', 'bzip2').lower()
    btypes = {'none': 'HG10UN',
              'bzip2': 'HG10BZ',
              'gzip': 'HG10GZ',
              'bundle2': 'HG20'}
    bundletype = btypes.get(bundletype)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_('unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)

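# Example for the debuggetbundle command above (illustrative; the URL and node
# id are placeholders): save everything the remote has beyond a known common
# changeset into a bundle2 file.
#   hg debuggetbundle https://example.com/repo out.hg -C <40-hex-node> -t bundle2
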
@command('debugignore', [], '[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no arguments, display the combined ignore pattern.

    Given space separated file names, show if the given file is ignored and,
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write("%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != '.':
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    for p in util.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_("%s is ignored\n") % m.uipath(f))
                else:
                    ui.write(_("%s is ignored because of "
                               "containing folder %s\n")
                             % (m.uipath(f), ignored))
                ignorefile, lineno, line = ignoredata
                ui.write(_("(ignore rule in %s, line %d: '%s')\n")
                         % (ignorefile, lineno, line))
            else:
                ui.write(_("%s is not ignored\n") % m.uipath(f))

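# Example for the debugignore command above (illustrative; the file name is a
# placeholder): report whether a path is ignored and which ignore rule (file
# and line number) matched it.
#   hg debugignore build/output.o
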
@command('debugindex', cmdutil.debugrevlogopts +
    [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
    _('[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True)
def debugindex(ui, repo, file_=None, **opts):
    """dump the contents of an index file"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise error.Abort(_("unknown format %d") % format)

    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    if format == 0:
        if ui.verbose:
            ui.write((" rev offset length linkrev"
                      " %s %s p2\n") % ("nodeid".ljust(idlen),
                                        "p1".ljust(idlen)))
        else:
            ui.write((" rev linkrev %s %s p2\n") % (
                "nodeid".ljust(idlen), "p1".ljust(idlen)))
    elif format == 1:
        if ui.verbose:
            ui.write((" rev flag offset length size link p1"
                      " p2 %s\n") % "nodeid".rjust(idlen))
        else:
            ui.write((" rev flag size link p1 p2 %s\n") %
                     "nodeid".rjust(idlen))

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                pp = [nullid, nullid]
            if ui.verbose:
                ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
                    i, r.start(i), r.length(i), r.linkrev(i),
                    shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
            else:
                ui.write("% 6d % 7d %s %s %s\n" % (
                    i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
                    shortfn(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                    r.linkrev(i), pr[0], pr[1], shortfn(node)))
            else:
                ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
                    shortfn(node)))

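# Example for the debugindex command above (illustrative; the file path is a
# placeholder): dump the manifest revlog index, or a single file's index using
# the newer output format.
#   hg debugindex -m
#   hg debugindex -f 1 path/to/file.txt
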
@command('debugindexdot', cmdutil.debugrevlogopts,
    _('-c|-m|FILE'), optionalrepo=True)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
    ui.write(("digraph G {\n"))
    for i in r:
        node = r.node(i)
        pp = r.parents(node)
        ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
        if pp[1] != nullid:
            ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
    ui.write("}\n")

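# Example for the debugindexdot command above (illustrative; assumes the
# external graphviz "dot" tool is installed): render the changelog DAG to an
# image.
#   hg debugindexdot -c | dot -Tpng -o dag.png
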
@command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
def debuginstall(ui, **opts):
    '''test Mercurial installation

    Returns 0 on success.
    '''
    opts = pycompat.byteskwargs(opts)

    def writetemp(contents):
        (fd, name) = pycompat.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, r"wb")
        f.write(contents)
        f.close()
        return name

    problems = 0

    fm = ui.formatter('debuginstall', opts)
    fm.startitem()

    # encoding
    fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(err, 'encodingerror', _(" %s\n"
                 " (check that your locale is properly set)\n"), err)

    # Python
    fm.write('pythonexe', _("checking Python executable (%s)\n"),
             pycompat.sysexecutable)
    fm.write('pythonver', _("checking Python version (%s)\n"),
             ("%d.%d.%d" % sys.version_info[:3]))
    fm.write('pythonlib', _("checking Python lib (%s)...\n"),
             os.path.dirname(pycompat.fsencode(os.__file__)))

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add('sni')

    fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
             fm.formatlist(sorted(security), name='protocol',
                           fmt='%s', sep=','))

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if 'tls1.2' not in security:
        fm.plain(_(' TLS 1.2 not supported by Python install; '
                   'network connections lack modern security\n'))
    if 'sni' not in security:
        fm.plain(_(' SNI not supported by Python install; may have '
                   'connectivity issues with some servers\n'))

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write('hgver', _("checking Mercurial version (%s)\n"),
             hgver.split('+')[0])
    fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
             '+'.join(hgver.split('+')[1:]))

    # compiled modules
    fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
             policy.policy)
    fm.write('hgmodules', _("checking installed modules (%s)...\n"),
             os.path.dirname(pycompat.fsencode(__file__)))

    if policy.policy in ('c', 'allow'):
        err = None
        try:
            from .cext import (
                base85,
                bdiff,
                mpatch,
                osutil,
            )
            dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, 'extensionserror', " %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write('compengines', _('checking registered compression engines (%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines),
                           name='compengine', fmt='%s', sep=', '))
    fm.write('compenginesavail', _('checking available compression engines '
                                   '(%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines
                                  if e.available()),
                           name='compengine', fmt='%s', sep=', '))
    wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
    fm.write('compenginesserver', _('checking available compression engines '
                                    'for wire protocol (%s)\n'),
             fm.formatlist([e.name() for e in wirecompengines
                            if e.wireprotosupport()],
                           name='compengine', fmt='%s', sep=', '))
    re2 = 'missing'
    if util._re2:
        re2 = 'available'
    fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatepaths()
    fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
    fm.condwrite(not p, '', _(" no template directories found\n"))
    if p:
        m = templater.templatepath("map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                p = None
            fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
        else:
            p = None
        fm.condwrite(p, 'defaulttemplate',
                     _("checking default template (%s)\n"), m)
        fm.condwrite(not m, 'defaulttemplatenotfound',
                     _(" template '%s' not found\n"), "default")
    if not p:
        problems += 1
    fm.condwrite(not p, '',
                 _(" (templates seem to have been installed incorrectly)\n"))

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
                 _(" No commit editor set and can't find %s in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor == 'vi' and editorbin)
    fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
                 _(" Can't find editor '%s' in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editorbin)
    if not cmdpath and editor != 'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = stringutil.forcebytestr(e)
        problems += 1

    fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
    fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
                 " (specify a username in your configuration file)\n"), err)

    fm.condwrite(not problems, '',
                 _("no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(problems, 'problems',
                 _("%d problems detected,"
                   " please check your install!\n"), problems)
    fm.end()

    return problems

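# Example for the debuginstall command above (illustrative): run the
# installation self-check; the return value is the number of problems found,
# and -T json (via the formatter options registered on the command) gives
# machine-readable output.
#   hg debuginstall -T json
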
@command('debugknown', [], _('REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('known'):
        raise error.Abort("known() not supported by target repository")
    flags = repo.known([bin(s) for s in ids])
    ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))

@command('debuglabelcomplete', [], _('LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    debugnamecomplete(ui, repo, *args)

@command('debuglocks',
         [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
          ('W', 'force-wlock', None,
           _('free the working state lock (DANGEROUS)')),
          ('s', 'set-lock', None, _('set the store lock until stopped')),
          ('S', 'set-wlock', None,
           _('set the working state lock until stopped'))],
         _('[OPTION]...'))
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    if opts.get(r'force_lock'):
        repo.svfs.unlink('lock')
    if opts.get(r'force_wlock'):
        repo.vfs.unlink('wlock')
    if opts.get(r'force_lock') or opts.get(r'force_wlock'):
        return 0

    locks = []
    try:
        if opts.get(r'set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_('wlock is already held'))
        if opts.get(r'set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_('lock is already held'))
        if len(locks):
            ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if ":" in locker:
                    host, pid = locker.split(':')
                    if host == socket.gethostname():
                        locker = 'user %s, process %s' % (user, pid)
                    else:
                        locker = 'user %s, process %s, host %s' \
                                 % (user, pid, host)
                ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
                return 1
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise

        ui.write(("%-6s free\n") % (name + ":"))
        return 0

    held += report(repo.svfs, "lock", repo.lock)
    held += report(repo.vfs, "wlock", repo.wlock)

    return held

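# Example for the debuglocks command above (illustrative): show the current
# lock holders, or hold the store lock while testing another client (it is
# released when the command is interrupted).
#   hg debuglocks
#   hg debuglocks --set-lock
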
@command('debugmanifestfulltextcache', [
        ('', 'clear', False, _('clear the cache')),
        ('a', 'add', '', _('add the given manifest node to the cache'),
         _('NODE'))
    ], '')
def debugmanifestfulltextcache(ui, repo, add=None, **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""
    with repo.lock():
        r = repo.manifestlog._revlog
        try:
            cache = r._fulltextcache
        except AttributeError:
            ui.warn(_(
                "Current revlog implementation doesn't appear to have a "
                'manifest fulltext cache\n'))
            return

        if opts.get(r'clear'):
            cache.clear()

        if add:
            try:
                manifest = repo.manifestlog[r.lookup(add)]
            except error.LookupError as e:
                raise error.Abort(e, hint="Check your manifest node id")
            manifest.read() # stores revision in cache too

        if not len(cache):
            ui.write(_('Cache empty'))
        else:
            ui.write(
                _('Cache contains %d manifest entries, in order of most to '
                  'least recent:\n') % (len(cache),))
            totalsize = 0
            for nodeid in cache:
                # Use cache.get to not update the LRU order
                data = cache.get(nodeid)
                size = len(data)
                totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
                ui.write(_('id: %s, size %s\n') % (
                    hex(nodeid), util.bytecount(size)))
            ondisk = cache._opener.stat('manifestfulltextcache').st_size
            ui.write(
                _('Total cache data size %s, on-disk %s\n') % (
                    util.bytecount(totalsize), util.bytecount(ondisk))
            )

@command('debugmergestate', [], '')
def debugmergestate(ui, repo, *args):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""
    def _hashornull(h):
        if h == nullhex:
            return 'null'
        else:
            return h

    def printrecords(version):
        ui.write(('* version %d records\n') % version)
        if version == 1:
            records = v1records
        else:
            records = v2records

        for rtype, record in records:
            # pretty print some record types
            if rtype == 'L':
                ui.write(('local: %s\n') % record)
            elif rtype == 'O':
                ui.write(('other: %s\n') % record)
            elif rtype == 'm':
                driver, mdstate = record.split('\0', 1)
                ui.write(('merge driver: %s (state "%s")\n')
                         % (driver, mdstate))
            elif rtype in 'FDC':
                r = record.split('\0')
                f, state, hash, lfile, afile, anode, ofile = r[0:7]
                if version == 1:
                    onode = 'not stored in v1 format'
                    flags = r[7]
                else:
                    onode, flags = r[7:9]
                ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
                         % (f, rtype, state, _hashornull(hash)))
                ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
                ui.write((' ancestor path: %s (node %s)\n')
                         % (afile, _hashornull(anode)))
                ui.write((' other path: %s (node %s)\n')
                         % (ofile, _hashornull(onode)))
            elif rtype == 'f':
                filename, rawextras = record.split('\0', 1)
                extras = rawextras.split('\0')
                i = 0
                extrastrings = []
                while i < len(extras):
                    extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
                    i += 2

                ui.write(('file extras: %s (%s)\n')
                         % (filename, ', '.join(extrastrings)))
            elif rtype == 'l':
                labels = record.split('\0', 2)
                labels = [l for l in labels if len(l) > 0]
                ui.write(('labels:\n'))
                ui.write((' local: %s\n' % labels[0]))
                ui.write((' other: %s\n' % labels[1]))
                if len(labels) > 2:
                    ui.write((' base: %s\n' % labels[2]))
            else:
                ui.write(('unrecognized entry: %s\t%s\n')
                         % (rtype, record.replace('\0', '\t')))

    # Avoid mergestate.read() since it may raise an exception for unsupported
    # merge state records. We shouldn't be doing this, but this is OK since this
    # command is pretty low-level.
    ms = mergemod.mergestate(repo)

    # sort so that reasonable information is on top
    v1records = ms._readrecordsv1()
    v2records = ms._readrecordsv2()
    order = 'LOml'
    def key(r):
        idx = order.find(r[0])
        if idx == -1:
            return (1, r[1])
        else:
            return (0, idx)
    v1records.sort(key=key)
    v2records.sort(key=key)

    if not v1records and not v2records:
        ui.write(('no merge state found\n'))
    elif not v2records:
        ui.note(('no version 2 merge state\n'))
        printrecords(1)
    elif ms._v1v2match(v1records, v2records):
        ui.note(('v1 and v2 states match: using v2\n'))
        printrecords(2)
    else:
        ui.note(('v1 and v2 states mismatch: using v1\n'))
        printrecords(1)
        if ui.verbose:
            printrecords(2)

@command('debugnamecomplete', [], _('NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    names = set()
    # since we previously only listed open branches, we will handle that
    # specially (after this for loop)
    for name, ns in repo.names.iteritems():
        if name != 'branches':
            names.update(ns.listnames(repo))
    names.update(tag for (tag, heads, tip, closed)
                 in repo.branchmap().iterbranches() if not closed)
    completions = set()
    if not args:
        args = ['']
    for a in args:
        completions.update(n for n in names if n.startswith(a))
    ui.write('\n'.join(sorted(completions)))
    ui.write('\n')

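# Example for the debugnamecomplete command above (illustrative; the prefix is
# arbitrary): list every tag, bookmark and open branch name starting with a
# prefix, the way a shell completion helper would.
#   hg debugnamecomplete rel
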
@command('debugobsolete',
        [('', 'flags', 0, _('markers flag')),
         ('', 'record-parents', False,
          _('record parent information for the precursor')),
         ('r', 'rev', [], _('display markers relevant to REV')),
         ('', 'exclusive', False, _('restrict display to markers only '
                                    'relevant to REV')),
         ('', 'index', False, _('display index of the marker')),
         ('', 'delete', [], _('delete markers specified by indices')),
        ] + cmdutil.commitopts2 + cmdutil.formatteropts,
        _('[OBSOLETED [REPLACEMENT ...]]'))
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.Abort('changeset references must be full hexadecimal '
                              'node identifiers')

    if opts.get('delete'):
        indices = []
        for v in opts.get('delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.Abort(_('invalid index value: %r') % v,
                                  hint=_('use integers for indices'))

        if repo.currenttransaction():
            raise error.Abort(_('cannot delete obsmarkers in the middle '
                                'of transaction.'))

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_('deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        if opts['rev']:
            raise error.Abort('cannot select revision when creating marker')
        metadata = {}
        metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction('debugobsolete')
            try:
                date = opts.get('date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts['record_parents']:
                    if prec not in repo.unfiltered():
                        raise error.Abort('cannot use --record-parents on '
                                          'unknown changesets')
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(tr, prec, succs, opts['flags'],
                                     parents=parents, date=date,
                                     metadata=metadata, ui=ui)
                tr.close()
            except ValueError as exc:
                raise error.Abort(_('bad obsmarker input: %s') %
                                  pycompat.bytestr(exc))
            finally:
                tr.release()
        finally:
            l.release()
    else:
        if opts['rev']:
            revs = scmutil.revrange(repo, opts['rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(obsutil.getmarkers(repo, nodes=nodes,
                                              exclusive=opts['exclusive']))
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get('rev') and opts.get('index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter('debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get('index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()

1766 @command('debugpathcomplete',
1764 @command('debugpathcomplete',
1767 [('f', 'full', None, _('complete an entire path')),
1765 [('f', 'full', None, _('complete an entire path')),
1768 ('n', 'normal', None, _('show only normal files')),
1766 ('n', 'normal', None, _('show only normal files')),
1769 ('a', 'added', None, _('show only added files')),
1767 ('a', 'added', None, _('show only added files')),
1770 ('r', 'removed', None, _('show only removed files'))],
1768 ('r', 'removed', None, _('show only removed files'))],
1771 _('FILESPEC...'))
1769 _('FILESPEC...'))
1772 def debugpathcomplete(ui, repo, *specs, **opts):
1770 def debugpathcomplete(ui, repo, *specs, **opts):
1773 '''complete part or all of a tracked path
1771 '''complete part or all of a tracked path
1774
1772
1775 This command supports shells that offer path name completion. It
1773 This command supports shells that offer path name completion. It
1776 currently completes only files already known to the dirstate.
1774 currently completes only files already known to the dirstate.
1777
1775
1778 Completion extends only to the next path segment unless
1776 Completion extends only to the next path segment unless
1779 --full is specified, in which case entire paths are used.'''
1777 --full is specified, in which case entire paths are used.'''
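    # Illustrative usage (not part of the original source), assuming a
    # hypothetical repository that tracks 'src/pkg/main.py':
    #
    #   $ hg debugpathcomplete src/
    #   src/pkg
    #   $ hg debugpathcomplete --full src/
    #   src/pkg/main.py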

    def complete(path, acceptable):
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += '/'
        spec = spec[len(rootdir):]
        fixpaths = pycompat.ossep != '/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, '/')
        speclen = len(spec)
        fullpaths = opts[r'full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.iteritems():
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace('/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    acceptable = ''
    if opts[r'normal']:
        acceptable += 'nm'
    if opts[r'added']:
        acceptable += 'a'
    if opts[r'removed']:
        acceptable += 'r'
    cwd = repo.getcwd()
    if not specs:
        specs = ['.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or 'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write('\n')

@command('debugpeer', [], _('PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    overrides = {
        ('devel', 'debug.peer-request'): True,
    }

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

        local = peer.local() is not None
        canpush = peer.canpush()

        ui.write(_('url: %s\n') % peer.url())
        ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
        ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))

@command('debugpickmergetool',
         [('r', 'rev', '', _('check for files in this revision'), _('REV')),
          ('', 'changedelete', None, _('emulate merging change and delete')),
         ] + cmdutil.walkopts + cmdutil.mergetoolopts,
         _('[PATTERN]...'),
         inferrepo=True)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for the specified file

    As described in :hg:`help merge-tools`, Mercurial examines the
    configurations below in this order to decide which merge tool is
    chosen for the specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reasons only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out the examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actually updating to it.

    With --debug, this command also shows warning messages while matching
    against ``merge-patterns`` and so on. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases the amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows the configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If a merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such a case, the information above
    is useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts['tool']:
        overrides[('ui', 'forcemerge')] = opts['tool']
        ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))

    with ui.configoverride(overrides, 'debugmergepatterns'):
        hgmerge = encoding.environ.get("HGMERGE")
        if hgmerge is not None:
            ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config("ui", "merge")
        if uimerge:
            ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get('rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts['changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                if not ui.debugflag:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(repo, ui, path,
                                                     fctx.isbinary(),
                                                     'l' in fctx.flags(),
                                                     changedelete)
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(('%s = %s\n') % (path, tool))

@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''
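    # Illustrative usage (not part of the original source); the URL is
    # hypothetical:
    #
    #   $ hg debugpushkey ssh://example.com/repo namespaces
    #   $ hg debugpushkey ssh://example.com/repo bookmarks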

    target = hg.peer(ui, {}, repopath)
    if keyinfo:
        key, old, new = keyinfo
        with target.commandexecutor() as e:
            r = e.callcommand('pushkey', {
                'namespace': namespace,
                'key': key,
                'old': old,
                'new': new,
            }).result()

        ui.status(pycompat.bytestr(r) + '\n')
        return not r
    else:
        for k, v in sorted(target.listkeys(namespace).iteritems()):
            ui.write("%s\t%s\n" % (stringutil.escapestr(k),
                                   stringutil.escapestr(v)))

@command('debugpvec', [], _('A B'))
def debugpvec(ui, repo, a, b=None):
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = "="
    elif pa > pb:
        rel = ">"
    elif pa < pb:
        rel = "<"
    elif pa | pb:
        rel = "|"
    ui.write(_("a: %s\n") % pa)
    ui.write(_("b: %s\n") % pb)
    ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
             (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
              pa.distance(pb), rel))

@command('debugrebuilddirstate|debugrebuildstate',
    [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
     ('', 'minimal', None, _('only rebuild files that are inconsistent with '
                             'the working copy parent')),
    ],
    _('[-r REV]'))
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get(r'minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            manifestonly = manifestfiles - dirstatefiles
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)

@command('debugrebuildfncache', [], '')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    repair.rebuildfncache(ui, repo)

@command('debugrename',
    [('r', 'rev', '', _('revision to debug'), _('REV'))],
    _('[-r REV] FILE'))
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'))
    m = scmutil.match(ctx, (file1,) + pats, opts)
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        o = fctx.filelog().renamed(fctx.filenode())
        rel = m.rel(abs)
        if o:
            ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
        else:
            ui.write(_("%s not renamed\n") % rel)

@command('debugrevlog', cmdutil.debugrevlogopts +
    [('d', 'dump', False, _('dump index data'))],
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)

    if opts.get("dump"):
        numrevs = len(r)
        ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
                  " rawsize totalsize compression heads chainlen\n"))
        ts = 0
        heads = set()

        for rev in pycompat.xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                     "%11d %5d %8d\n" %
                     (rev, p1, p2, r.start(rev), r.end(rev),
                      r.start(dbase), r.start(cbase),
                      r.start(p1), r.start(p2),
                      rs, ts, compression, len(heads), clen))
        return 0

    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append('inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append('generaldelta')
    if not flags:
        flags = ['(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks the ways the "delta" are built
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about the delta chain of each rev
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in pycompat.xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, '_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            chunktype = bytes(segment[0:1])
        else:
            chunktype = 'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if 0 < numsemi:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    basedfmtstr = '%%%dd\n'
    basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        return basedfmtstr % len(str(max))
    def pcfmtstr(max, padding=0):
        return basepcfmtstr % (len(str(max)), ' ' * padding)

    def pcfmt(value, total):
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.write(('format : %d\n') % format)
    ui.write(('flags : %s\n') % ', '.join(flags))

    ui.write('\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
    ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' empty : ') + fmt % pcfmt(numempty, numrevs))
    ui.write((' text : ')
             + fmt % pcfmt(numemptytext, numemptytext + numemptydelta))
    ui.write((' delta : ')
             + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta))
    ui.write((' snapshot : ') + fmt % pcfmt(numfull + numsemi, numrevs))
    for depth in sorted(numsnapdepth):
        ui.write((' lvl-%-3d : ' % depth)
                 + fmt % pcfmt(numsnapdepth[depth], numrevs))
    ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
    ui.write(('revision size : ') + fmt2 % totalsize)
    ui.write((' snapshot : ')
             + fmt % pcfmt(fulltotal + semitotal, totalsize))
    for depth in sorted(numsnapdepth):
        ui.write((' lvl-%-3d : ' % depth)
                 + fmt % pcfmt(snaptotal[depth], totalsize))
    ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        if chunktype == 'empty':
            return ' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return ' 0x%s : ' % hex(chunktype)

    ui.write('\n')
    ui.write(('chunks : ') + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.write(('chunks size : ') + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write('\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.write(('avg chain length : ') + fmt % avgchainlen)
    ui.write(('max chain length : ') + fmt % maxchainlen)
    ui.write(('max chain reach : ') + fmt % maxchainspan)
    ui.write(('compression ratio : ') + fmt % compratio)

    if format > 0:
        ui.write('\n')
        ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
                 % tuple(datasize))
        ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
                 % tuple(fullsize))
        ui.write(('inter-snapshot size (min/max/avg) : %d / %d / %d\n')
                 % tuple(semisize))
        for depth in sorted(snapsizedepth):
            if depth == 0:
                continue
            ui.write((' level-%-3d (min/max/avg) : %d / %d / %d\n')
                     % ((depth,) + tuple(snapsizedepth[depth])))
        ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
                 % tuple(deltasize))

    if numdeltas > 0:
        ui.write('\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
        if numprev > 0:
            ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
                                                            numprev))
            ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
                                                            numprev))
            ui.write((' other : ') + fmt2 % pcfmt(numoprev,
                                                  numprev))
        if gdelta:
            ui.write(('deltas against p1 : ')
                     + fmt % pcfmt(nump1, numdeltas))
            ui.write(('deltas against p2 : ')
                     + fmt % pcfmt(nump2, numdeltas))
            ui.write(('deltas against other : ') + fmt % pcfmt(numother,
                                                               numdeltas))

@command('debugrevspec',
    [('', 'optimize', None,
      _('print parsed tree after optimizing (DEPRECATED)')),
     ('', 'show-revs', True, _('print list of result revisions (default)')),
     ('s', 'show-set', None, _('print internal representation of result set')),
     ('p', 'show-stage', [],
      _('print parsed tree at the given stage'), _('NAME')),
     ('', 'no-optimized', False, _('evaluate tree without optimization')),
     ('', 'verify-optimized', False, _('verify optimized result')),
     ],
    ('REVSPEC'))
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use the -p/--show-stage option to print the parsed tree at the given
    stages. Use -p all to print the tree at every stage.

    Use the --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the
    unoptimized one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems('revsetalias')
    stages = [
        ('parsed', lambda tree: tree),
        ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
                                                           ui.warn)),
        ('concatenated', revsetlang.foldconcat),
        ('analyzed', revsetlang.analyze),
        ('optimized', revsetlang.optimize),
    ]
    if opts['no_optimized']:
        stages = stages[:-1]
    if opts['verify_optimized'] and opts['no_optimized']:
        raise error.Abort(_('cannot use --verify-optimized with '
                            '--no-optimized'))
    stagenames = set(n for n, f in stages)

    showalways = set()
    showchanged = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
        showchanged.update(['expanded', 'concatenated'])
        if opts['optimize']:
            showalways.add('optimized')
    if opts['show_stage'] and opts['optimize']:
        raise error.Abort(_('cannot use --optimize with --show-stage'))
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(revsetlang.prettyformat(tree), "\n")
            printedtree = tree

    if opts['verify_optimized']:
        arevs = revset.makematcher(treebystage['analyzed'])(repo)
        brevs = revset.makematcher(treebystage['optimized'])(repo)
        if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
            ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
            ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        ui.write(('--- analyzed\n'), label='diff.file_a')
        ui.write(('+++ optimized\n'), label='diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write('-%s\n' % c, label='diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write('+%s\n' % c, label='diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(' %s\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
        ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
    if not opts['show_revs']:
        return
    for c in revs:
        ui.write("%d\n" % c)

@command('debugserve', [
    ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
    ('', 'logiofd', '', _('file descriptor to log server I/O to')),
    ('', 'logiofile', '', _('file to log server I/O to')),
    ], '')
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts['sshstdio']:
        raise error.Abort(_('only --sshstdio is currently supported'))

    logfh = None

    if opts['logiofd'] and opts['logiofile']:
        raise error.Abort(_('cannot use both --logiofd and --logiofile'))

    if opts['logiofd']:
        # Line buffered because output is line based.
        try:
            logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
    elif opts['logiofile']:
        logfh = open(opts['logiofile'], 'ab', 1)

    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()

@command('debugsetparents', [], _('REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """

    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, 'null').node()

    with repo.wlock():
        repo.setparents(node1, node2)

@command('debugssl', [], '[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    '''test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    '''
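    # Illustrative usage (not part of the original source), on a Windows host
    # with a hypothetical server URL:
    #
    #   $ hg debugssl https://hg.example.com/repo
    #   checking the certificate chain for hg.example.com
    #   full certificate chain is available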
    if not pycompat.iswindows:
        raise error.Abort(_('certificate chain building is only possible on '
                            'Windows'))

    if not source:
        if not repo:
            raise error.Abort(_("there is no Mercurial repository here, and no "
                                "server specified"))
        source = "default"

    source, branches = hg.parseurl(ui.expandpath(source))
    url = util.url(source)
    addr = None

    defaultport = {'https': 443, 'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_("malformed port number in URL"))
    else:
        raise error.Abort(_("only https and ssh connections are supported"))

    from . import win32

    s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
                        cert_reqs=ssl.CERT_NONE, ca_certs=None)

    try:
        s.connect(addr)
        cert = s.getpeercert(True)

        ui.status(_('checking the certificate chain for %s\n') % url.host)

        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_('certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_('failed.\n'))
            else:
                ui.status(_('done.\n'))
        else:
            ui.status(_('full certificate chain is available\n'))
    finally:
        s.close()

@command('debugsub',
    [('r', 'rev', '',
      _('revision to check'), _('REV'))],
    _('[-r REV] [REV]'))
def debugsub(ui, repo, rev=None):
    ctx = scmutil.revsingle(repo, rev, None)
    for k, v in sorted(ctx.substate.items()):
        ui.write(('path %s\n') % k)
        ui.write((' source %s\n') % v[0])
        ui.write((' revision %s\n') % v[1])

2570 @command('debugsuccessorssets',
2568 @command('debugsuccessorssets',
2571 [('', 'closest', False, _('return closest successors sets only'))],
2569 [('', 'closest', False, _('return closest successors sets only'))],
2572 _('[REV]'))
2570 _('[REV]'))
2573 def debugsuccessorssets(ui, repo, *revs, **opts):
2571 def debugsuccessorssets(ui, repo, *revs, **opts):
2574 """show set of successors for revision
2572 """show set of successors for revision
2575
2573
2576 A successors set of changeset A is a consistent group of revisions that
2574 A successors set of changeset A is a consistent group of revisions that
2577 succeed A. It contains non-obsolete changesets only, unless the --closest
2575 succeed A. It contains non-obsolete changesets only, unless the --closest
2578 option is set.
2576 option is set.
2579
2577
2580 In most cases a changeset A has a single successors set containing a single
2578 In most cases a changeset A has a single successors set containing a single
2581 successor (changeset A replaced by A').
2579 successor (changeset A replaced by A').
2582
2580
2583 A changeset that is made obsolete with no successors is called "pruned".
2581 A changeset that is made obsolete with no successors is called "pruned".
2584 Such changesets have no successors sets at all.
2582 Such changesets have no successors sets at all.
2585
2583
2586 A changeset that has been "split" will have a successors set containing
2584 A changeset that has been "split" will have a successors set containing
2587 more than one successor.
2585 more than one successor.
2588
2586
2589 A changeset that has been rewritten in multiple different ways is called
2587 A changeset that has been rewritten in multiple different ways is called
2590 "divergent". Such changesets have multiple successor sets (each of which
2588 "divergent". Such changesets have multiple successor sets (each of which
2591 may also be split, i.e. have multiple successors).
2589 may also be split, i.e. have multiple successors).
2592
2590
2593 Results are displayed as follows::
2591 Results are displayed as follows::
2594
2592
2595 <rev1>
2593 <rev1>
2596 <successors-1A>
2594 <successors-1A>
2597 <rev2>
2595 <rev2>
2598 <successors-2A>
2596 <successors-2A>
2599 <successors-2B1> <successors-2B2> <successors-2B3>
2597 <successors-2B1> <successors-2B2> <successors-2B3>
2600
2598
2601 Here rev2 has two possible (i.e. divergent) successors sets. The first
2599 Here rev2 has two possible (i.e. divergent) successors sets. The first
2602 holds one element, whereas the second holds three (i.e. the changeset has
2600 holds one element, whereas the second holds three (i.e. the changeset has
2603 been split).
2601 been split).
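With concrete hashes (purely illustrative), a changeset that was simply
amended and one that was split might be reported as::

    1a2b3c4d5e6f
        9f8e7d6c5b4a
    89abcdef0123
        0011aabb2233 4455ccdd6677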
2604 """
2602 """
2605 # passed to successorssets caching computation from one call to another
2603 # passed to successorssets caching computation from one call to another
2606 cache = {}
2604 cache = {}
2607 ctx2str = bytes
2605 ctx2str = bytes
2608 node2str = short
2606 node2str = short
2609 for rev in scmutil.revrange(repo, revs):
2607 for rev in scmutil.revrange(repo, revs):
2610 ctx = repo[rev]
2608 ctx = repo[rev]
2611 ui.write('%s\n'% ctx2str(ctx))
2609 ui.write('%s\n'% ctx2str(ctx))
2612 for succsset in obsutil.successorssets(repo, ctx.node(),
2610 for succsset in obsutil.successorssets(repo, ctx.node(),
2613 closest=opts[r'closest'],
2611 closest=opts[r'closest'],
2614 cache=cache):
2612 cache=cache):
2615 if succsset:
2613 if succsset:
2616 ui.write(' ')
2614 ui.write(' ')
2617 ui.write(node2str(succsset[0]))
2615 ui.write(node2str(succsset[0]))
2618 for node in succsset[1:]:
2616 for node in succsset[1:]:
2619 ui.write(' ')
2617 ui.write(' ')
2620 ui.write(node2str(node))
2618 ui.write(node2str(node))
2621 ui.write('\n')
2619 ui.write('\n')
2622
2620
2623 @command('debugtemplate',
2621 @command('debugtemplate',
2624 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2622 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2625 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2623 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2626 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2624 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2627 optionalrepo=True)
2625 optionalrepo=True)
2628 def debugtemplate(ui, repo, tmpl, **opts):
2626 def debugtemplate(ui, repo, tmpl, **opts):
2629 """parse and apply a template
2627 """parse and apply a template
2630
2628
2631 If -r/--rev is given, the template is processed as a log template and
2629 If -r/--rev is given, the template is processed as a log template and
2632 applied to the given changesets. Otherwise, it is processed as a generic
2630 applied to the given changesets. Otherwise, it is processed as a generic
2633 template.
2631 template.
2634
2632
2635 Use --verbose to print the parsed tree.
2633 Use --verbose to print the parsed tree.
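For example (the template text below is only illustrative)::

    $ hg debugtemplate -D word=hello '{word}\n'
    $ hg debugtemplate -r . '{rev}:{node|short} {desc|firstline}\n'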
2636 """
2634 """
2637 revs = None
2635 revs = None
2638 if opts[r'rev']:
2636 if opts[r'rev']:
2639 if repo is None:
2637 if repo is None:
2640 raise error.RepoError(_('there is no Mercurial repository here '
2638 raise error.RepoError(_('there is no Mercurial repository here '
2641 '(.hg not found)'))
2639 '(.hg not found)'))
2642 revs = scmutil.revrange(repo, opts[r'rev'])
2640 revs = scmutil.revrange(repo, opts[r'rev'])
2643
2641
2644 props = {}
2642 props = {}
2645 for d in opts[r'define']:
2643 for d in opts[r'define']:
2646 try:
2644 try:
2647 k, v = (e.strip() for e in d.split('=', 1))
2645 k, v = (e.strip() for e in d.split('=', 1))
2648 if not k or k == 'ui':
2646 if not k or k == 'ui':
2649 raise ValueError
2647 raise ValueError
2650 props[k] = v
2648 props[k] = v
2651 except ValueError:
2649 except ValueError:
2652 raise error.Abort(_('malformed keyword definition: %s') % d)
2650 raise error.Abort(_('malformed keyword definition: %s') % d)
2653
2651
2654 if ui.verbose:
2652 if ui.verbose:
2655 aliases = ui.configitems('templatealias')
2653 aliases = ui.configitems('templatealias')
2656 tree = templater.parse(tmpl)
2654 tree = templater.parse(tmpl)
2657 ui.note(templater.prettyformat(tree), '\n')
2655 ui.note(templater.prettyformat(tree), '\n')
2658 newtree = templater.expandaliases(tree, aliases)
2656 newtree = templater.expandaliases(tree, aliases)
2659 if newtree != tree:
2657 if newtree != tree:
2660 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2658 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2661
2659
2662 if revs is None:
2660 if revs is None:
2663 tres = formatter.templateresources(ui, repo)
2661 tres = formatter.templateresources(ui, repo)
2664 t = formatter.maketemplater(ui, tmpl, resources=tres)
2662 t = formatter.maketemplater(ui, tmpl, resources=tres)
2665 if ui.verbose:
2663 if ui.verbose:
2666 kwds, funcs = t.symbolsuseddefault()
2664 kwds, funcs = t.symbolsuseddefault()
2667 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2665 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2668 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2666 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2669 ui.write(t.renderdefault(props))
2667 ui.write(t.renderdefault(props))
2670 else:
2668 else:
2671 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2669 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2672 if ui.verbose:
2670 if ui.verbose:
2673 kwds, funcs = displayer.t.symbolsuseddefault()
2671 kwds, funcs = displayer.t.symbolsuseddefault()
2674 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2672 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2675 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2673 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2676 for r in revs:
2674 for r in revs:
2677 displayer.show(repo[r], **pycompat.strkwargs(props))
2675 displayer.show(repo[r], **pycompat.strkwargs(props))
2678 displayer.close()
2676 displayer.close()
2679
2677
2680 @command('debuguigetpass', [
2678 @command('debuguigetpass', [
2681 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2679 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2682 ], _('[-p TEXT]'), norepo=True)
2680 ], _('[-p TEXT]'), norepo=True)
2683 def debuguigetpass(ui, prompt=''):
2681 def debuguigetpass(ui, prompt=''):
2684 """show prompt to type password"""
2682 """show prompt to type password"""
2685 r = ui.getpass(prompt)
2683 r = ui.getpass(prompt)
2686 ui.write(('response: %s\n') % r)
2684 ui.write(('response: %s\n') % r)
2687
2685
2688 @command('debuguiprompt', [
2686 @command('debuguiprompt', [
2689 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2687 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2690 ], _('[-p TEXT]'), norepo=True)
2688 ], _('[-p TEXT]'), norepo=True)
2691 def debuguiprompt(ui, prompt=''):
2689 def debuguiprompt(ui, prompt=''):
2692 """show plain prompt"""
2690 """show plain prompt"""
2693 r = ui.prompt(prompt)
2691 r = ui.prompt(prompt)
2694 ui.write(('response: %s\n') % r)
2692 ui.write(('response: %s\n') % r)
2695
2693
2696 @command('debugupdatecaches', [])
2694 @command('debugupdatecaches', [])
2697 def debugupdatecaches(ui, repo, *pats, **opts):
2695 def debugupdatecaches(ui, repo, *pats, **opts):
2698 """warm all known caches in the repository"""
2696 """warm all known caches in the repository"""
2699 with repo.wlock(), repo.lock():
2697 with repo.wlock(), repo.lock():
2700 repo.updatecaches(full=True)
2698 repo.updatecaches(full=True)
2701
2699
2702 @command('debugupgraderepo', [
2700 @command('debugupgraderepo', [
2703 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2701 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2704 ('', 'run', False, _('performs an upgrade')),
2702 ('', 'run', False, _('performs an upgrade')),
2705 ])
2703 ])
2706 def debugupgraderepo(ui, repo, run=False, optimize=None):
2704 def debugupgraderepo(ui, repo, run=False, optimize=None):
2707 """upgrade a repository to use different features
2705 """upgrade a repository to use different features
2708
2706
2709 If no arguments are specified, the repository is evaluated for upgrade
2707 If no arguments are specified, the repository is evaluated for upgrade
2710 and a list of problems and potential optimizations is printed.
2708 and a list of problems and potential optimizations is printed.
2711
2709
2712 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2710 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2713 can be influenced via additional arguments. More details will be provided
2711 can be influenced via additional arguments. More details will be provided
2714 by the command output when run without ``--run``.
2712 by the command output when run without ``--run``.
2715
2713
2716 During the upgrade, the repository will be locked and no writes will be
2714 During the upgrade, the repository will be locked and no writes will be
2717 allowed.
2715 allowed.
2718
2716
2719 At the end of the upgrade, the repository may not be readable while new
2717 At the end of the upgrade, the repository may not be readable while new
2720 repository data is swapped in. This window will be as long as it takes to
2718 repository data is swapped in. This window will be as long as it takes to
2721 rename some directories inside the ``.hg`` directory. On most machines, this
2719 rename some directories inside the ``.hg`` directory. On most machines, this
2722 should complete almost instantaneously and the chances of a consumer being
2720 should complete almost instantaneously and the chances of a consumer being
2723 unable to access the repository should be low.
2721 unable to access the repository should be low.
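A typical (illustrative) workflow is to review the printed report first and
only then re-run with ``--run``::

    $ hg debugupgraderepo
    $ hg debugupgraderepo --run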
2724 """
2722 """
2725 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2723 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2726
2724
2727 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2725 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2728 inferrepo=True)
2726 inferrepo=True)
2729 def debugwalk(ui, repo, *pats, **opts):
2727 def debugwalk(ui, repo, *pats, **opts):
2730 """show how files match on given patterns"""
2728 """show how files match on given patterns"""
2731 opts = pycompat.byteskwargs(opts)
2729 opts = pycompat.byteskwargs(opts)
2732 m = scmutil.match(repo[None], pats, opts)
2730 m = scmutil.match(repo[None], pats, opts)
2733 if ui.verbose:
2731 if ui.verbose:
2734 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
2732 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
2735 items = list(repo[None].walk(m))
2733 items = list(repo[None].walk(m))
2736 if not items:
2734 if not items:
2737 return
2735 return
2738 f = lambda fn: fn
2736 f = lambda fn: fn
2739 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2737 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2740 f = lambda fn: util.normpath(fn)
2738 f = lambda fn: util.normpath(fn)
2741 fmt = 'f %%-%ds %%-%ds %%s' % (
2739 fmt = 'f %%-%ds %%-%ds %%s' % (
2742 max([len(abs) for abs in items]),
2740 max([len(abs) for abs in items]),
2743 max([len(m.rel(abs)) for abs in items]))
2741 max([len(m.rel(abs)) for abs in items]))
2744 for abs in items:
2742 for abs in items:
2745 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2743 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2746 ui.write("%s\n" % line.rstrip())
2744 ui.write("%s\n" % line.rstrip())
2747
2745
2748 @command('debugwhyunstable', [], _('REV'))
2746 @command('debugwhyunstable', [], _('REV'))
2749 def debugwhyunstable(ui, repo, rev):
2747 def debugwhyunstable(ui, repo, rev):
2750 """explain instabilities of a changeset"""
2748 """explain instabilities of a changeset"""
2751 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
2749 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
2752 dnodes = ''
2750 dnodes = ''
2753 if entry.get('divergentnodes'):
2751 if entry.get('divergentnodes'):
2754 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2752 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2755 for ctx in entry['divergentnodes']) + ' '
2753 for ctx in entry['divergentnodes']) + ' '
2756 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2754 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2757 entry['reason'], entry['node']))
2755 entry['reason'], entry['node']))
2758
2756
2759 @command('debugwireargs',
2757 @command('debugwireargs',
2760 [('', 'three', '', 'three'),
2758 [('', 'three', '', 'three'),
2761 ('', 'four', '', 'four'),
2759 ('', 'four', '', 'four'),
2762 ('', 'five', '', 'five'),
2760 ('', 'five', '', 'five'),
2763 ] + cmdutil.remoteopts,
2761 ] + cmdutil.remoteopts,
2764 _('REPO [OPTIONS]... [ONE [TWO]]'),
2762 _('REPO [OPTIONS]... [ONE [TWO]]'),
2765 norepo=True)
2763 norepo=True)
2766 def debugwireargs(ui, repopath, *vals, **opts):
2764 def debugwireargs(ui, repopath, *vals, **opts):
2767 opts = pycompat.byteskwargs(opts)
2765 opts = pycompat.byteskwargs(opts)
2768 repo = hg.peer(ui, opts, repopath)
2766 repo = hg.peer(ui, opts, repopath)
2769 for opt in cmdutil.remoteopts:
2767 for opt in cmdutil.remoteopts:
2770 del opts[opt[1]]
2768 del opts[opt[1]]
2771 args = {}
2769 args = {}
2772 for k, v in opts.iteritems():
2770 for k, v in opts.iteritems():
2773 if v:
2771 if v:
2774 args[k] = v
2772 args[k] = v
2775 args = pycompat.strkwargs(args)
2773 args = pycompat.strkwargs(args)
2776 # run twice to check that we don't mess up the stream for the next command
2774 # run twice to check that we don't mess up the stream for the next command
2777 res1 = repo.debugwireargs(*vals, **args)
2775 res1 = repo.debugwireargs(*vals, **args)
2778 res2 = repo.debugwireargs(*vals, **args)
2776 res2 = repo.debugwireargs(*vals, **args)
2779 ui.write("%s\n" % res1)
2777 ui.write("%s\n" % res1)
2780 if res1 != res2:
2778 if res1 != res2:
2781 ui.warn("%s\n" % res2)
2779 ui.warn("%s\n" % res2)
2782
2780
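# Helper for debugwireproto below: parses the stdin mini language into
# (action, indented-block-lines) pairs, skipping blank and '#' comment lines.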
2783 def _parsewirelangblocks(fh):
2781 def _parsewirelangblocks(fh):
2784 activeaction = None
2782 activeaction = None
2785 blocklines = []
2783 blocklines = []
2786
2784
2787 for line in fh:
2785 for line in fh:
2788 line = line.rstrip()
2786 line = line.rstrip()
2789 if not line:
2787 if not line:
2790 continue
2788 continue
2791
2789
2792 if line.startswith(b'#'):
2790 if line.startswith(b'#'):
2793 continue
2791 continue
2794
2792
2795 if not line.startswith(b' '):
2793 if not line.startswith(b' '):
2796 # New block. Flush previous one.
2794 # New block. Flush previous one.
2797 if activeaction:
2795 if activeaction:
2798 yield activeaction, blocklines
2796 yield activeaction, blocklines
2799
2797
2800 activeaction = line
2798 activeaction = line
2801 blocklines = []
2799 blocklines = []
2802 continue
2800 continue
2803
2801
2804 # Else we start with an indent.
2802 # Else we start with an indent.
2805
2803
2806 if not activeaction:
2804 if not activeaction:
2807 raise error.Abort(_('indented line outside of block'))
2805 raise error.Abort(_('indented line outside of block'))
2808
2806
2809 blocklines.append(line)
2807 blocklines.append(line)
2810
2808
2811 # Flush last block.
2809 # Flush last block.
2812 if activeaction:
2810 if activeaction:
2813 yield activeaction, blocklines
2811 yield activeaction, blocklines
2814
2812
2815 @command('debugwireproto',
2813 @command('debugwireproto',
2816 [
2814 [
2817 ('', 'localssh', False, _('start an SSH server for this repo')),
2815 ('', 'localssh', False, _('start an SSH server for this repo')),
2818 ('', 'peer', '', _('construct a specific version of the peer')),
2816 ('', 'peer', '', _('construct a specific version of the peer')),
2819 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2817 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2820 ('', 'nologhandshake', False,
2818 ('', 'nologhandshake', False,
2821 _('do not log I/O related to the peer handshake')),
2819 _('do not log I/O related to the peer handshake')),
2822 ] + cmdutil.remoteopts,
2820 ] + cmdutil.remoteopts,
2823 _('[PATH]'),
2821 _('[PATH]'),
2824 optionalrepo=True)
2822 optionalrepo=True)
2825 def debugwireproto(ui, repo, path=None, **opts):
2823 def debugwireproto(ui, repo, path=None, **opts):
2826 """send wire protocol commands to a server
2824 """send wire protocol commands to a server
2827
2825
2828 This command can be used to issue wire protocol commands to remote
2826 This command can be used to issue wire protocol commands to remote
2829 peers and to debug the raw data being exchanged.
2827 peers and to debug the raw data being exchanged.
2830
2828
2831 ``--localssh`` will start an SSH server against the current repository
2829 ``--localssh`` will start an SSH server against the current repository
2832 and connect to that. By default, the connection will perform a handshake
2830 and connect to that. By default, the connection will perform a handshake
2833 and establish an appropriate peer instance.
2831 and establish an appropriate peer instance.
2834
2832
2835 ``--peer`` can be used to bypass the handshake protocol and construct a
2833 ``--peer`` can be used to bypass the handshake protocol and construct a
2836 peer instance using the specified class type. Valid values are ``raw``,
2834 peer instance using the specified class type. Valid values are ``raw``,
2837 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2835 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2838 raw data payloads and don't support higher-level command actions.
2836 raw data payloads and don't support higher-level command actions.
2839
2837
2840 ``--noreadstderr`` can be used to disable automatic reading from stderr
2838 ``--noreadstderr`` can be used to disable automatic reading from stderr
2841 of the peer (for SSH connections only). Disabling automatic reading of
2839 of the peer (for SSH connections only). Disabling automatic reading of
2842 stderr is useful for making output more deterministic.
2840 stderr is useful for making output more deterministic.
2843
2841
2844 Commands are issued via a mini language which is specified via stdin.
2842 Commands are issued via a mini language which is specified via stdin.
2845 The language consists of individual actions to perform. An action is
2843 The language consists of individual actions to perform. An action is
2846 defined by a block. A block is defined as a line with no leading
2844 defined by a block. A block is defined as a line with no leading
2847 space followed by 0 or more lines with leading space. Blocks are
2845 space followed by 0 or more lines with leading space. Blocks are
2848 effectively a high-level command with additional metadata.
2846 effectively a high-level command with additional metadata.
2849
2847
2850 Lines beginning with ``#`` are ignored.
2848 Lines beginning with ``#`` are ignored.
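For instance, one (illustrative) way to drive this command from a shell is a
heredoc on stdin::

    $ hg debugwireproto --localssh << EOF
    > command heads
    > EOF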
2851
2849
2852 The following sections denote available actions.
2850 The following sections denote available actions.
2853
2851
2854 raw
2852 raw
2855 ---
2853 ---
2856
2854
2857 Send raw data to the server.
2855 Send raw data to the server.
2858
2856
2859 The block payload contains the raw data to send as one atomic send
2857 The block payload contains the raw data to send as one atomic send
2860 operation. The data may not actually be delivered in a single system
2858 operation. The data may not actually be delivered in a single system
2861 call: it depends on the abilities of the transport being used.
2859 call: it depends on the abilities of the transport being used.
2862
2860
2863 Each line in the block is de-indented and concatenated. Then, that
2861 Each line in the block is de-indented and concatenated. Then, that
2864 value is evaluated as a Python b'' literal. This allows the use of
2862 value is evaluated as a Python b'' literal. This allows the use of
2865 backslash escaping, etc.
2863 backslash escaping, etc.
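A minimal (illustrative) block that sends a bogus command by hand::

    raw
        boguscommand\n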
2866
2864
2867 raw+
2865 raw+
2868 ----
2866 ----
2869
2867
2870 Behaves like ``raw``, except that output is flushed afterwards.
2868 Behaves like ``raw``, except that output is flushed afterwards.
2871
2869
2872 command <X>
2870 command <X>
2873 -----------
2871 -----------
2874
2872
2875 Send a request to run a named command, whose name follows the ``command``
2873 Send a request to run a named command, whose name follows the ``command``
2876 string.
2874 string.
2877
2875
2878 Arguments to the command are defined as lines in this block. The format of
2876 Arguments to the command are defined as lines in this block. The format of
2879 each line is ``<key> <value>``. e.g.::
2877 each line is ``<key> <value>``. e.g.::
2880
2878
2881 command listkeys
2879 command listkeys
2882 namespace bookmarks
2880 namespace bookmarks
2883
2881
2884 If the value begins with ``eval:``, it will be interpreted as a Python
2882 If the value begins with ``eval:``, it will be interpreted as a Python
2885 literal expression. Otherwise values are interpreted as Python b'' literals.
2883 literal expression. Otherwise values are interpreted as Python b'' literals.
2886 This allows sending complex types and encoding special byte sequences via
2884 This allows sending complex types and encoding special byte sequences via
2887 backslash escaping.
2885 backslash escaping.
2888
2886
2889 The following arguments have special meaning:
2887 The following arguments have special meaning:
2890
2888
2891 ``PUSHFILE``
2889 ``PUSHFILE``
2892 When defined, the *push* mechanism of the peer will be used instead
2890 When defined, the *push* mechanism of the peer will be used instead
2893 of the static request-response mechanism and the content of the
2891 of the static request-response mechanism and the content of the
2894 file specified in the value of this argument will be sent as the
2892 file specified in the value of this argument will be sent as the
2895 command payload.
2893 command payload.
2896
2894
2897 This can be used to submit a local bundle file to the remote.
2895 This can be used to submit a local bundle file to the remote.
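For example, pushing a previously generated bundle file (file name and
argument values are illustrative)::

    command unbundle
        PUSHFILE ../initial.v1.hg
        heads eval:[b'force']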
2898
2896
2899 batchbegin
2897 batchbegin
2900 ----------
2898 ----------
2901
2899
2902 Instruct the peer to begin a batched send.
2900 Instruct the peer to begin a batched send.
2903
2901
2904 All ``command`` blocks are queued for execution until the next
2902 All ``command`` blocks are queued for execution until the next
2905 ``batchsubmit`` block.
2903 ``batchsubmit`` block.
2906
2904
2907 batchsubmit
2905 batchsubmit
2908 -----------
2906 -----------
2909
2907
2910 Submit previously queued ``command`` blocks as a batch request.
2908 Submit previously queued ``command`` blocks as a batch request.
2911
2909
2912 This action MUST be paired with a ``batchbegin`` action.
2910 This action MUST be paired with a ``batchbegin`` action.
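A minimal (illustrative) batched exchange therefore looks like::

    batchbegin
    command heads
    command listkeys
        namespace bookmarks
    batchsubmit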
2913
2911
2914 httprequest <method> <path>
2912 httprequest <method> <path>
2915 ---------------------------
2913 ---------------------------
2916
2914
2917 (HTTP peer only)
2915 (HTTP peer only)
2918
2916
2919 Send an HTTP request to the peer.
2917 Send an HTTP request to the peer.
2920
2918
2921 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
2919 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
2922
2920
2923 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
2921 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
2924 headers to add to the request. e.g. ``Accept: foo``.
2922 headers to add to the request. e.g. ``Accept: foo``.
2925
2923
2926 The following arguments are special:
2924 The following arguments are special:
2927
2925
2928 ``BODYFILE``
2926 ``BODYFILE``
2929 The content of the file defined as the value to this argument will be
2927 The content of the file defined as the value to this argument will be
2930 transferred verbatim as the HTTP request body.
2928 transferred verbatim as the HTTP request body.
2931
2929
2932 ``frame <type> <flags> <payload>``
2930 ``frame <type> <flags> <payload>``
2933 Send a unified protocol frame as part of the request body.
2931 Send a unified protocol frame as part of the request body.
2934
2932
2935 All frames will be collected and sent as the body to the HTTP
2933 All frames will be collected and sent as the body to the HTTP
2936 request.
2934 request.
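For example, a hand-crafted GET request with an extra header (path and header
values are illustrative)::

    httprequest GET api/
        user-agent: test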
2937
2935
2938 close
2936 close
2939 -----
2937 -----
2940
2938
2941 Close the connection to the server.
2939 Close the connection to the server.
2942
2940
2943 flush
2941 flush
2944 -----
2942 -----
2945
2943
2946 Flush data written to the server.
2944 Flush data written to the server.
2947
2945
2948 readavailable
2946 readavailable
2949 -------------
2947 -------------
2950
2948
2951 Close the write end of the connection and read all available data from
2949 Close the write end of the connection and read all available data from
2952 the server.
2950 the server.
2953
2951
2954 If the connection to the server encompasses multiple pipes, we poll both
2952 If the connection to the server encompasses multiple pipes, we poll both
2955 pipes and read available data.
2953 pipes and read available data.
2956
2954
2957 readline
2955 readline
2958 --------
2956 --------
2959
2957
2960 Read a line of output from the server. If there are multiple output
2958 Read a line of output from the server. If there are multiple output
2961 pipes, reads only the main pipe.
2959 pipes, reads only the main pipe.
2962
2960
2963 ereadline
2961 ereadline
2964 ---------
2962 ---------
2965
2963
2966 Like ``readline``, but read from the stderr pipe, if available.
2964 Like ``readline``, but read from the stderr pipe, if available.
2967
2965
2968 read <X>
2966 read <X>
2969 --------
2967 --------
2970
2968
2971 ``read()`` N bytes from the server's main output pipe.
2969 ``read()`` N bytes from the server's main output pipe.
2972
2970
2973 eread <X>
2971 eread <X>
2974 ---------
2972 ---------
2975
2973
2976 ``read()`` N bytes from the server's stderr pipe, if available.
2974 ``read()`` N bytes from the server's stderr pipe, if available.
2977
2975
2978 Specifying Unified Frame-Based Protocol Frames
2976 Specifying Unified Frame-Based Protocol Frames
2979 ----------------------------------------------
2977 ----------------------------------------------
2980
2978
2981 It is possible to emit *Unified Frame-Based Protocol* frames by using
2979 It is possible to emit *Unified Frame-Based Protocol* frames by using
2982 special syntax.
2980 special syntax.
2983
2981
2984 A frame is composed of a type, flags, and payload. These can be parsed
2982 A frame is composed of a type, flags, and payload. These can be parsed
2985 from a string of the form:
2983 from a string of the form:
2986
2984
2987 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
2985 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
2988
2986
2989 ``request-id`` and ``stream-id`` are integers defining the request and
2987 ``request-id`` and ``stream-id`` are integers defining the request and
2990 stream identifiers.
2988 stream identifiers.
2991
2989
2992 ``type`` can be an integer value for the frame type or the string name
2990 ``type`` can be an integer value for the frame type or the string name
2993 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
2991 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
2994 ``command-name``.
2992 ``command-name``.
2995
2993
2996 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
2994 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
2997 components. Each component (and there can be just one) can be an integer
2995 components. Each component (and there can be just one) can be an integer
2998 or a flag name for stream flags or frame flags, respectively. Values are
2996 or a flag name for stream flags or frame flags, respectively. Values are
2999 resolved to integers and then bitwise OR'd together.
2997 resolved to integers and then bitwise OR'd together.
3000
2998
3001 ``payload`` represents the raw frame payload. If it begins with
2999 ``payload`` represents the raw frame payload. If it begins with
3002 ``cbor:``, the following string is evaluated as Python code and the
3000 ``cbor:``, the following string is evaluated as Python code and the
3003 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3001 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3004 as a Python byte string literal.
3002 as a Python byte string literal.
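Putting this together, a frame carrying a CBOR-encoded command request could
be written as follows inside an ``httprequest`` block (all values are
illustrative)::

    frame 1 1 stream-begin command-request new cbor:{b'name': b'heads'}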
3005 """
3003 """
3006 opts = pycompat.byteskwargs(opts)
3004 opts = pycompat.byteskwargs(opts)
3007
3005
3008 if opts['localssh'] and not repo:
3006 if opts['localssh'] and not repo:
3009 raise error.Abort(_('--localssh requires a repository'))
3007 raise error.Abort(_('--localssh requires a repository'))
3010
3008
3011 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
3009 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
3012 raise error.Abort(_('invalid value for --peer'),
3010 raise error.Abort(_('invalid value for --peer'),
3013 hint=_('valid values are "raw", "http2", "ssh1", and "ssh2"'))
3011 hint=_('valid values are "raw", "http2", "ssh1", and "ssh2"'))
3014
3012
3015 if path and opts['localssh']:
3013 if path and opts['localssh']:
3016 raise error.Abort(_('cannot specify --localssh with an explicit '
3014 raise error.Abort(_('cannot specify --localssh with an explicit '
3017 'path'))
3015 'path'))
3018
3016
3019 if ui.interactive():
3017 if ui.interactive():
3020 ui.write(_('(waiting for commands on stdin)\n'))
3018 ui.write(_('(waiting for commands on stdin)\n'))
3021
3019
3022 blocks = list(_parsewirelangblocks(ui.fin))
3020 blocks = list(_parsewirelangblocks(ui.fin))
3023
3021
3024 proc = None
3022 proc = None
3025 stdin = None
3023 stdin = None
3026 stdout = None
3024 stdout = None
3027 stderr = None
3025 stderr = None
3028 opener = None
3026 opener = None
3029
3027
3030 if opts['localssh']:
3028 if opts['localssh']:
3031 # We start the SSH server in its own process so there is process
3029 # We start the SSH server in its own process so there is process
3032 # separation. This prevents a whole class of potential bugs around
3030 # separation. This prevents a whole class of potential bugs around
3033 # shared state from interfering with server operation.
3031 # shared state from interfering with server operation.
3034 args = procutil.hgcmd() + [
3032 args = procutil.hgcmd() + [
3035 '-R', repo.root,
3033 '-R', repo.root,
3036 'debugserve', '--sshstdio',
3034 'debugserve', '--sshstdio',
3037 ]
3035 ]
3038 proc = subprocess.Popen(args, stdin=subprocess.PIPE,
3036 proc = subprocess.Popen(args, stdin=subprocess.PIPE,
3039 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
3037 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
3040 bufsize=0)
3038 bufsize=0)
3041
3039
3042 stdin = proc.stdin
3040 stdin = proc.stdin
3043 stdout = proc.stdout
3041 stdout = proc.stdout
3044 stderr = proc.stderr
3042 stderr = proc.stderr
3045
3043
3046 # We turn the pipes into observers so we can log I/O.
3044 # We turn the pipes into observers so we can log I/O.
3047 if ui.verbose or opts['peer'] == 'raw':
3045 if ui.verbose or opts['peer'] == 'raw':
3048 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
3046 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
3049 logdata=True)
3047 logdata=True)
3050 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
3048 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
3051 logdata=True)
3049 logdata=True)
3052 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
3050 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
3053 logdata=True)
3051 logdata=True)
3054
3052
3055 # --localssh also implies the peer connection settings.
3053 # --localssh also implies the peer connection settings.
3056
3054
3057 url = 'ssh://localserver'
3055 url = 'ssh://localserver'
3058 autoreadstderr = not opts['noreadstderr']
3056 autoreadstderr = not opts['noreadstderr']
3059
3057
3060 if opts['peer'] == 'ssh1':
3058 if opts['peer'] == 'ssh1':
3061 ui.write(_('creating ssh peer for wire protocol version 1\n'))
3059 ui.write(_('creating ssh peer for wire protocol version 1\n'))
3062 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
3060 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
3063 None, autoreadstderr=autoreadstderr)
3061 None, autoreadstderr=autoreadstderr)
3064 elif opts['peer'] == 'ssh2':
3062 elif opts['peer'] == 'ssh2':
3065 ui.write(_('creating ssh peer for wire protocol version 2\n'))
3063 ui.write(_('creating ssh peer for wire protocol version 2\n'))
3066 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
3064 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
3067 None, autoreadstderr=autoreadstderr)
3065 None, autoreadstderr=autoreadstderr)
3068 elif opts['peer'] == 'raw':
3066 elif opts['peer'] == 'raw':
3069 ui.write(_('using raw connection to peer\n'))
3067 ui.write(_('using raw connection to peer\n'))
3070 peer = None
3068 peer = None
3071 else:
3069 else:
3072 ui.write(_('creating ssh peer from handshake results\n'))
3070 ui.write(_('creating ssh peer from handshake results\n'))
3073 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
3071 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
3074 autoreadstderr=autoreadstderr)
3072 autoreadstderr=autoreadstderr)
3075
3073
3076 elif path:
3074 elif path:
3077 # We bypass hg.peer() so we can proxy the sockets.
3075 # We bypass hg.peer() so we can proxy the sockets.
3078 # TODO consider not doing this because we skip
3076 # TODO consider not doing this because we skip
3079 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
3077 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
3080 u = util.url(path)
3078 u = util.url(path)
3081 if u.scheme != 'http':
3079 if u.scheme != 'http':
3082 raise error.Abort(_('only http:// paths are currently supported'))
3080 raise error.Abort(_('only http:// paths are currently supported'))
3083
3081
3084 url, authinfo = u.authinfo()
3082 url, authinfo = u.authinfo()
3085 openerargs = {
3083 openerargs = {
3086 r'useragent': b'Mercurial debugwireproto',
3084 r'useragent': b'Mercurial debugwireproto',
3087 }
3085 }
3088
3086
3089 # Turn pipes/sockets into observers so we can log I/O.
3087 # Turn pipes/sockets into observers so we can log I/O.
3090 if ui.verbose:
3088 if ui.verbose:
3091 openerargs.update({
3089 openerargs.update({
3092 r'loggingfh': ui,
3090 r'loggingfh': ui,
3093 r'loggingname': b's',
3091 r'loggingname': b's',
3094 r'loggingopts': {
3092 r'loggingopts': {
3095 r'logdata': True,
3093 r'logdata': True,
3096 r'logdataapis': False,
3094 r'logdataapis': False,
3097 },
3095 },
3098 })
3096 })
3099
3097
3100 if ui.debugflag:
3098 if ui.debugflag:
3101 openerargs[r'loggingopts'][r'logdataapis'] = True
3099 openerargs[r'loggingopts'][r'logdataapis'] = True
3102
3100
3103 # Don't send default headers when in raw mode. This allows us to
3101 # Don't send default headers when in raw mode. This allows us to
3104 # bypass most of the behavior of our URL handling code so we can
3102 # bypass most of the behavior of our URL handling code so we can
3105 # have near complete control over what's sent on the wire.
3103 # have near complete control over what's sent on the wire.
3106 if opts['peer'] == 'raw':
3104 if opts['peer'] == 'raw':
3107 openerargs[r'sendaccept'] = False
3105 openerargs[r'sendaccept'] = False
3108
3106
3109 opener = urlmod.opener(ui, authinfo, **openerargs)
3107 opener = urlmod.opener(ui, authinfo, **openerargs)
3110
3108
3111 if opts['peer'] == 'http2':
3109 if opts['peer'] == 'http2':
3112 ui.write(_('creating http peer for wire protocol version 2\n'))
3110 ui.write(_('creating http peer for wire protocol version 2\n'))
3113 # We go through makepeer() because we need an API descriptor for
3111 # We go through makepeer() because we need an API descriptor for
3114 # the peer instance to be useful.
3112 # the peer instance to be useful.
3115 with ui.configoverride({
3113 with ui.configoverride({
3116 ('experimental', 'httppeer.advertise-v2'): True}):
3114 ('experimental', 'httppeer.advertise-v2'): True}):
3117 if opts['nologhandshake']:
3115 if opts['nologhandshake']:
3118 ui.pushbuffer()
3116 ui.pushbuffer()
3119
3117
3120 peer = httppeer.makepeer(ui, path, opener=opener)
3118 peer = httppeer.makepeer(ui, path, opener=opener)
3121
3119
3122 if opts['nologhandshake']:
3120 if opts['nologhandshake']:
3123 ui.popbuffer()
3121 ui.popbuffer()
3124
3122
3125 if not isinstance(peer, httppeer.httpv2peer):
3123 if not isinstance(peer, httppeer.httpv2peer):
3126 raise error.Abort(_('could not instantiate HTTP peer for '
3124 raise error.Abort(_('could not instantiate HTTP peer for '
3127 'wire protocol version 2'),
3125 'wire protocol version 2'),
3128 hint=_('the server may not have the feature '
3126 hint=_('the server may not have the feature '
3129 'enabled or is not allowing this '
3127 'enabled or is not allowing this '
3130 'client version'))
3128 'client version'))
3131
3129
3132 elif opts['peer'] == 'raw':
3130 elif opts['peer'] == 'raw':
3133 ui.write(_('using raw connection to peer\n'))
3131 ui.write(_('using raw connection to peer\n'))
3134 peer = None
3132 peer = None
3135 elif opts['peer']:
3133 elif opts['peer']:
3136 raise error.Abort(_('--peer %s not supported with HTTP peers') %
3134 raise error.Abort(_('--peer %s not supported with HTTP peers') %
3137 opts['peer'])
3135 opts['peer'])
3138 else:
3136 else:
3139 peer = httppeer.makepeer(ui, path, opener=opener)
3137 peer = httppeer.makepeer(ui, path, opener=opener)
3140
3138
3141 # We /could/ populate stdin/stdout with sock.makefile()...
3139 # We /could/ populate stdin/stdout with sock.makefile()...
3142 else:
3140 else:
3143 raise error.Abort(_('unsupported connection configuration'))
3141 raise error.Abort(_('unsupported connection configuration'))
3144
3142
3145 batchedcommands = None
3143 batchedcommands = None
3146
3144
3147 # Now perform actions based on the parsed wire language instructions.
3145 # Now perform actions based on the parsed wire language instructions.
3148 for action, lines in blocks:
3146 for action, lines in blocks:
3149 if action in ('raw', 'raw+'):
3147 if action in ('raw', 'raw+'):
3150 if not stdin:
3148 if not stdin:
3151 raise error.Abort(_('cannot call raw/raw+ on this peer'))
3149 raise error.Abort(_('cannot call raw/raw+ on this peer'))
3152
3150
3153 # Concatenate the data together.
3151 # Concatenate the data together.
3154 data = ''.join(l.lstrip() for l in lines)
3152 data = ''.join(l.lstrip() for l in lines)
3155 data = stringutil.unescapestr(data)
3153 data = stringutil.unescapestr(data)
3156 stdin.write(data)
3154 stdin.write(data)
3157
3155
3158 if action == 'raw+':
3156 if action == 'raw+':
3159 stdin.flush()
3157 stdin.flush()
3160 elif action == 'flush':
3158 elif action == 'flush':
3161 if not stdin:
3159 if not stdin:
3162 raise error.Abort(_('cannot call flush on this peer'))
3160 raise error.Abort(_('cannot call flush on this peer'))
3163 stdin.flush()
3161 stdin.flush()
3164 elif action.startswith('command'):
3162 elif action.startswith('command'):
3165 if not peer:
3163 if not peer:
3166 raise error.Abort(_('cannot send commands unless peer instance '
3164 raise error.Abort(_('cannot send commands unless peer instance '
3167 'is available'))
3165 'is available'))
3168
3166
3169 command = action.split(' ', 1)[1]
3167 command = action.split(' ', 1)[1]
3170
3168
3171 args = {}
3169 args = {}
3172 for line in lines:
3170 for line in lines:
3173 # We need to allow empty values.
3171 # We need to allow empty values.
3174 fields = line.lstrip().split(' ', 1)
3172 fields = line.lstrip().split(' ', 1)
3175 if len(fields) == 1:
3173 if len(fields) == 1:
3176 key = fields[0]
3174 key = fields[0]
3177 value = ''
3175 value = ''
3178 else:
3176 else:
3179 key, value = fields
3177 key, value = fields
3180
3178
3181 if value.startswith('eval:'):
3179 if value.startswith('eval:'):
3182 value = stringutil.evalpythonliteral(value[5:])
3180 value = stringutil.evalpythonliteral(value[5:])
3183 else:
3181 else:
3184 value = stringutil.unescapestr(value)
3182 value = stringutil.unescapestr(value)
3185
3183
3186 args[key] = value
3184 args[key] = value
3187
3185
3188 if batchedcommands is not None:
3186 if batchedcommands is not None:
3189 batchedcommands.append((command, args))
3187 batchedcommands.append((command, args))
3190 continue
3188 continue
3191
3189
3192 ui.status(_('sending %s command\n') % command)
3190 ui.status(_('sending %s command\n') % command)
3193
3191
3194 if 'PUSHFILE' in args:
3192 if 'PUSHFILE' in args:
3195 with open(args['PUSHFILE'], r'rb') as fh:
3193 with open(args['PUSHFILE'], r'rb') as fh:
3196 del args['PUSHFILE']
3194 del args['PUSHFILE']
3197 res, output = peer._callpush(command, fh,
3195 res, output = peer._callpush(command, fh,
3198 **pycompat.strkwargs(args))
3196 **pycompat.strkwargs(args))
3199 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3197 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3200 ui.status(_('remote output: %s\n') %
3198 ui.status(_('remote output: %s\n') %
3201 stringutil.escapestr(output))
3199 stringutil.escapestr(output))
3202 else:
3200 else:
3203 with peer.commandexecutor() as e:
3201 with peer.commandexecutor() as e:
3204 res = e.callcommand(command, args).result()
3202 res = e.callcommand(command, args).result()
3205
3203
3206 if isinstance(res, wireprotov2peer.commandresponse):
3204 if isinstance(res, wireprotov2peer.commandresponse):
3207 val = list(res.cborobjects())
3205 val = list(res.cborobjects())
3208 ui.status(_('response: %s\n') %
3206 ui.status(_('response: %s\n') %
3209 stringutil.pprint(val, bprefix=True))
3207 stringutil.pprint(val, bprefix=True))
3210
3208
3211 else:
3209 else:
3212 ui.status(_('response: %s\n') %
3210 ui.status(_('response: %s\n') %
3213 stringutil.pprint(res, bprefix=True))
3211 stringutil.pprint(res, bprefix=True))
3214
3212
3215 elif action == 'batchbegin':
3213 elif action == 'batchbegin':
3216 if batchedcommands is not None:
3214 if batchedcommands is not None:
3217 raise error.Abort(_('nested batchbegin not allowed'))
3215 raise error.Abort(_('nested batchbegin not allowed'))
3218
3216
3219 batchedcommands = []
3217 batchedcommands = []
3220 elif action == 'batchsubmit':
3218 elif action == 'batchsubmit':
3221 # There is a batching API we could go through. But it would be
3219 # There is a batching API we could go through. But it would be
3222 # difficult to normalize requests into function calls. It is easier
3220 # difficult to normalize requests into function calls. It is easier
3223 # to bypass this layer and normalize to commands + args.
3221 # to bypass this layer and normalize to commands + args.
3224 ui.status(_('sending batch with %d sub-commands\n') %
3222 ui.status(_('sending batch with %d sub-commands\n') %
3225 len(batchedcommands))
3223 len(batchedcommands))
3226 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3224 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3227 ui.status(_('response #%d: %s\n') %
3225 ui.status(_('response #%d: %s\n') %
3228 (i, stringutil.escapestr(chunk)))
3226 (i, stringutil.escapestr(chunk)))
3229
3227
3230 batchedcommands = None
3228 batchedcommands = None
3231
3229
3232 elif action.startswith('httprequest '):
3230 elif action.startswith('httprequest '):
3233 if not opener:
3231 if not opener:
3234 raise error.Abort(_('cannot use httprequest without an HTTP '
3232 raise error.Abort(_('cannot use httprequest without an HTTP '
3235 'peer'))
3233 'peer'))
3236
3234
3237 request = action.split(' ', 2)
3235 request = action.split(' ', 2)
3238 if len(request) != 3:
3236 if len(request) != 3:
3239 raise error.Abort(_('invalid httprequest: expected format is '
3237 raise error.Abort(_('invalid httprequest: expected format is '
3240 '"httprequest <method> <path>'))
3238 '"httprequest <method> <path>'))
3241
3239
3242 method, httppath = request[1:]
3240 method, httppath = request[1:]
3243 headers = {}
3241 headers = {}
3244 body = None
3242 body = None
3245 frames = []
3243 frames = []
3246 for line in lines:
3244 for line in lines:
3247 line = line.lstrip()
3245 line = line.lstrip()
3248 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3246 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3249 if m:
3247 if m:
3250 headers[m.group(1)] = m.group(2)
3248 headers[m.group(1)] = m.group(2)
3251 continue
3249 continue
3252
3250
3253 if line.startswith(b'BODYFILE '):
3251 if line.startswith(b'BODYFILE '):
3254 with open(line.split(b' ', 1)[1], 'rb') as fh:
3252 with open(line.split(b' ', 1)[1], 'rb') as fh:
3255 body = fh.read()
3253 body = fh.read()
3256 elif line.startswith(b'frame '):
3254 elif line.startswith(b'frame '):
3257 frame = wireprotoframing.makeframefromhumanstring(
3255 frame = wireprotoframing.makeframefromhumanstring(
3258 line[len(b'frame '):])
3256 line[len(b'frame '):])
3259
3257
3260 frames.append(frame)
3258 frames.append(frame)
3261 else:
3259 else:
3262 raise error.Abort(_('unknown argument to httprequest: %s') %
3260 raise error.Abort(_('unknown argument to httprequest: %s') %
3263 line)
3261 line)
3264
3262
3265 url = path + httppath
3263 url = path + httppath
3266
3264
3267 if frames:
3265 if frames:
3268 body = b''.join(bytes(f) for f in frames)
3266 body = b''.join(bytes(f) for f in frames)
3269
3267
3270 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3268 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3271
3269
3272 # urllib.Request insists on using has_data() as a proxy for
3270 # urllib.Request insists on using has_data() as a proxy for
3273 # determining the request method. Override that to use our
3271 # determining the request method. Override that to use our
3274 # explicitly requested method.
3272 # explicitly requested method.
3275 req.get_method = lambda: pycompat.sysstr(method)
3273 req.get_method = lambda: pycompat.sysstr(method)
3276
3274
3277 try:
3275 try:
3278 res = opener.open(req)
3276 res = opener.open(req)
3279 body = res.read()
3277 body = res.read()
3280 except util.urlerr.urlerror as e:
3278 except util.urlerr.urlerror as e:
3281 # read() method must be called, but only exists in Python 2
3279 # read() method must be called, but only exists in Python 2
3282 getattr(e, 'read', lambda: None)()
3280 getattr(e, 'read', lambda: None)()
3283 continue
3281 continue
3284
3282
3285 if res.headers.get('Content-Type') == 'application/mercurial-cbor':
3283 if res.headers.get('Content-Type') == 'application/mercurial-cbor':
3286 ui.write(_('cbor> %s\n') %
3284 ui.write(_('cbor> %s\n') %
3287 stringutil.pprint(cbor.loads(body), bprefix=True))
3285 stringutil.pprint(cbor.loads(body), bprefix=True))
3288
3286
3289 elif action == 'close':
3287 elif action == 'close':
3290 peer.close()
3288 peer.close()
3291 elif action == 'readavailable':
3289 elif action == 'readavailable':
3292 if not stdout or not stderr:
3290 if not stdout or not stderr:
3293 raise error.Abort(_('readavailable not available on this peer'))
3291 raise error.Abort(_('readavailable not available on this peer'))
3294
3292
3295 stdin.close()
3293 stdin.close()
3296 stdout.read()
3294 stdout.read()
3297 stderr.read()
3295 stderr.read()
3298
3296
3299 elif action == 'readline':
3297 elif action == 'readline':
3300 if not stdout:
3298 if not stdout:
3301 raise error.Abort(_('readline not available on this peer'))
3299 raise error.Abort(_('readline not available on this peer'))
3302 stdout.readline()
3300 stdout.readline()
3303 elif action == 'ereadline':
3301 elif action == 'ereadline':
3304 if not stderr:
3302 if not stderr:
3305 raise error.Abort(_('ereadline not available on this peer'))
3303 raise error.Abort(_('ereadline not available on this peer'))
3306 stderr.readline()
3304 stderr.readline()
3307 elif action.startswith('read '):
3305 elif action.startswith('read '):
3308 count = int(action.split(' ', 1)[1])
3306 count = int(action.split(' ', 1)[1])
3309 if not stdout:
3307 if not stdout:
3310 raise error.Abort(_('read not available on this peer'))
3308 raise error.Abort(_('read not available on this peer'))
3311 stdout.read(count)
3309 stdout.read(count)
3312 elif action.startswith('eread '):
3310 elif action.startswith('eread '):
3313 count = int(action.split(' ', 1)[1])
3311 count = int(action.split(' ', 1)[1])
3314 if not stderr:
3312 if not stderr:
3315 raise error.Abort(_('eread not available on this peer'))
3313 raise error.Abort(_('eread not available on this peer'))
3316 stderr.read(count)
3314 stderr.read(count)
3317 else:
3315 else:
3318 raise error.Abort(_('unknown action: %s') % action)
3316 raise error.Abort(_('unknown action: %s') % action)
3319
3317
3320 if batchedcommands is not None:
3318 if batchedcommands is not None:
3321 raise error.Abort(_('unclosed "batchbegin" request'))
3319 raise error.Abort(_('unclosed "batchbegin" request'))
3322
3320
3323 if peer:
3321 if peer:
3324 peer.close()
3322 peer.close()
3325
3323
3326 if proc:
3324 if proc:
3327 proc.kill()
3325 proc.kill()