wireprotov2: establish a type for representing command response...
Gregory Szorc
r37738:d715a850 default
@@ -1,3127 +1,3137 @@
 # debugcommands.py - command processing for debug* commands
 #
 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
 from __future__ import absolute_import
 
 import codecs
 import collections
 import difflib
 import errno
 import operator
 import os
 import random
 import re
 import socket
 import ssl
 import stat
 import string
 import subprocess
 import sys
 import tempfile
 import time
 
 from .i18n import _
 from .node import (
     bin,
     hex,
     nullhex,
     nullid,
     nullrev,
     short,
 )
 from .thirdparty import (
     cbor,
 )
 from . import (
     bundle2,
     changegroup,
     cmdutil,
     color,
     context,
     dagparser,
     dagutil,
     encoding,
     error,
     exchange,
     extensions,
     filemerge,
     fileset,
     formatter,
     hg,
     httppeer,
     localrepo,
     lock as lockmod,
     logcmdutil,
     merge as mergemod,
     obsolete,
     obsutil,
     phases,
     policy,
     pvec,
     pycompat,
     registrar,
     repair,
     revlog,
     revset,
     revsetlang,
     scmutil,
     setdiscovery,
     simplemerge,
     smartset,
     sshpeer,
     sslutil,
     streamclone,
     templater,
     treediscovery,
     upgrade,
     url as urlmod,
     util,
     vfs as vfsmod,
     wireprotoframing,
     wireprotoserver,
+    wireprotov2peer,
 )
 from .utils import (
     dateutil,
     procutil,
     stringutil,
 )
 
 release = lockmod.release
 
 command = registrar.command()
 
 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
 def debugancestor(ui, repo, *args):
     """find the ancestor revision of two revisions in a given index"""
     if len(args) == 3:
         index, rev1, rev2 = args
         r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
         lookup = r.lookup
     elif len(args) == 2:
         if not repo:
             raise error.Abort(_('there is no Mercurial repository here '
                                 '(.hg not found)'))
         rev1, rev2 = args
         r = repo.changelog
         lookup = repo.lookup
     else:
         raise error.Abort(_('either two or three arguments required'))
     a = r.ancestor(lookup(rev1), lookup(rev2))
     ui.write('%d:%s\n' % (r.rev(a), hex(a)))
 
 @command('debugapplystreamclonebundle', [], 'FILE')
 def debugapplystreamclonebundle(ui, repo, fname):
     """apply a stream clone bundle file"""
     f = hg.openpath(ui, fname)
     gen = exchange.readbundle(ui, f, fname)
     gen.apply(repo)
 
 @command('debugbuilddag',
     [('m', 'mergeable-file', None, _('add single file mergeable changes')),
     ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
     ('n', 'new-file', None, _('add new file at each rev'))],
     _('[OPTION]... [TEXT]'))
 def debugbuilddag(ui, repo, text=None,
                   mergeable_file=False,
                   overwritten_file=False,
                   new_file=False):
     """builds a repo with a given DAG from scratch in the current empty repo
 
     The description of the DAG is read from stdin if not given on the
     command line.
 
     Elements:
 
      - "+n" is a linear run of n nodes based on the current default parent
      - "." is a single node based on the current default parent
      - "$" resets the default parent to null (implied at the start);
            otherwise the default parent is always the last node created
      - "<p" sets the default parent to the backref p
      - "*p" is a fork at parent p, which is a backref
      - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
      - "/p2" is a merge of the preceding node and p2
      - ":tag" defines a local tag for the preceding node
      - "@branch" sets the named branch for subsequent nodes
      - "#...\\n" is a comment up to the end of the line
 
     Whitespace between the above elements is ignored.
 
     A backref is either
 
      - a number n, which references the node curr-n, where curr is the current
        node, or
      - the name of a local tag you placed earlier using ":tag", or
      - empty to denote the default parent.
 
     All string valued-elements are either strictly alphanumeric, or must
     be enclosed in double quotes ("..."), with "\\" as escape character.
     """
 
     if text is None:
         ui.status(_("reading DAG from stdin\n"))
         text = ui.fin.read()
 
     cl = repo.changelog
     if len(cl) > 0:
         raise error.Abort(_('repository is not empty'))
 
     # determine number of revs in DAG
     total = 0
     for type, data in dagparser.parsedag(text):
         if type == 'n':
             total += 1
 
     if mergeable_file:
         linesperrev = 2
         # make a file with k lines per rev
         initialmergedlines = ['%d' % i for i in xrange(0, total * linesperrev)]
         initialmergedlines.append("")
 
     tags = []
 
     wlock = lock = tr = None
     try:
         wlock = repo.wlock()
         lock = repo.lock()
         tr = repo.transaction("builddag")
 
         at = -1
         atbranch = 'default'
         nodeids = []
         id = 0
         ui.progress(_('building'), id, unit=_('revisions'), total=total)
         for type, data in dagparser.parsedag(text):
             if type == 'n':
                 ui.note(('node %s\n' % pycompat.bytestr(data)))
                 id, ps = data
 
                 files = []
                 filecontent = {}
 
                 p2 = None
                 if mergeable_file:
                     fn = "mf"
                     p1 = repo[ps[0]]
                     if len(ps) > 1:
                         p2 = repo[ps[1]]
                         pa = p1.ancestor(p2)
                         base, local, other = [x[fn].data() for x in (pa, p1,
                                                                      p2)]
                         m3 = simplemerge.Merge3Text(base, local, other)
                         ml = [l.strip() for l in m3.merge_lines()]
                         ml.append("")
                     elif at > 0:
                         ml = p1[fn].data().split("\n")
                     else:
                         ml = initialmergedlines
                     ml[id * linesperrev] += " r%i" % id
                     mergedtext = "\n".join(ml)
                     files.append(fn)
                     filecontent[fn] = mergedtext
 
                 if overwritten_file:
                     fn = "of"
                     files.append(fn)
                     filecontent[fn] = "r%i\n" % id
 
                 if new_file:
                     fn = "nf%i" % id
                     files.append(fn)
                     filecontent[fn] = "r%i\n" % id
                     if len(ps) > 1:
                         if not p2:
                             p2 = repo[ps[1]]
                         for fn in p2:
                             if fn.startswith("nf"):
                                 files.append(fn)
                                 filecontent[fn] = p2[fn].data()
 
                 def fctxfn(repo, cx, path):
                     if path in filecontent:
                         return context.memfilectx(repo, cx, path,
                                                   filecontent[path])
                     return None
 
                 if len(ps) == 0 or ps[0] < 0:
                     pars = [None, None]
                 elif len(ps) == 1:
                     pars = [nodeids[ps[0]], None]
                 else:
                     pars = [nodeids[p] for p in ps]
                 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
                                     date=(id, 0),
                                     user="debugbuilddag",
                                     extra={'branch': atbranch})
                 nodeid = repo.commitctx(cx)
                 nodeids.append(nodeid)
                 at = id
             elif type == 'l':
                 id, name = data
                 ui.note(('tag %s\n' % name))
                 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
             elif type == 'a':
                 ui.note(('branch %s\n' % data))
                 atbranch = data
             ui.progress(_('building'), id, unit=_('revisions'), total=total)
         tr.close()
 
         if tags:
             repo.vfs.write("localtags", "".join(tags))
     finally:
         ui.progress(_('building'), None)
         release(tr, lock, wlock)
 
 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
     indent_string = ' ' * indent
     if all:
         ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
                  % indent_string)
 
         def showchunks(named):
             ui.write("\n%s%s\n" % (indent_string, named))
             for deltadata in gen.deltaiter():
                 node, p1, p2, cs, deltabase, delta, flags = deltadata
                 ui.write("%s%s %s %s %s %s %d\n" %
                          (indent_string, hex(node), hex(p1), hex(p2),
                           hex(cs), hex(deltabase), len(delta)))
 
         chunkdata = gen.changelogheader()
         showchunks("changelog")
         chunkdata = gen.manifestheader()
         showchunks("manifest")
         for chunkdata in iter(gen.filelogheader, {}):
             fname = chunkdata['filename']
             showchunks(fname)
     else:
         if isinstance(gen, bundle2.unbundle20):
             raise error.Abort(_('use debugbundle2 for this file'))
         chunkdata = gen.changelogheader()
         for deltadata in gen.deltaiter():
             node, p1, p2, cs, deltabase, delta, flags = deltadata
             ui.write("%s%s\n" % (indent_string, hex(node)))
 
 def _debugobsmarkers(ui, part, indent=0, **opts):
     """display version and markers contained in 'data'"""
     opts = pycompat.byteskwargs(opts)
     data = part.read()
     indent_string = ' ' * indent
     try:
         version, markers = obsolete._readmarkers(data)
     except error.UnknownVersion as exc:
         msg = "%sunsupported version: %s (%d bytes)\n"
         msg %= indent_string, exc.version, len(data)
         ui.write(msg)
     else:
         msg = "%sversion: %d (%d bytes)\n"
         msg %= indent_string, version, len(data)
         ui.write(msg)
         fm = ui.formatter('debugobsolete', opts)
         for rawmarker in sorted(markers):
             m = obsutil.marker(None, rawmarker)
             fm.startitem()
             fm.plain(indent_string)
             cmdutil.showmarker(fm, m)
         fm.end()
 
 def _debugphaseheads(ui, data, indent=0):
     """display version and markers contained in 'data'"""
     indent_string = ' ' * indent
     headsbyphase = phases.binarydecode(data)
     for phase in phases.allphases:
         for head in headsbyphase[phase]:
             ui.write(indent_string)
             ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
 
 def _quasirepr(thing):
     if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
         return '{%s}' % (
             b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
     return pycompat.bytestr(repr(thing))
 
 def _debugbundle2(ui, gen, all=None, **opts):
     """lists the contents of a bundle2"""
     if not isinstance(gen, bundle2.unbundle20):
         raise error.Abort(_('not a bundle2 file'))
     ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
     parttypes = opts.get(r'part_type', [])
     for part in gen.iterparts():
         if parttypes and part.type not in parttypes:
             continue
         ui.write('%s -- %s\n' % (part.type, _quasirepr(part.params)))
         if part.type == 'changegroup':
             version = part.params.get('version', '01')
             cg = changegroup.getunbundler(version, part, 'UN')
             if not ui.quiet:
                 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
         if part.type == 'obsmarkers':
             if not ui.quiet:
                 _debugobsmarkers(ui, part, indent=4, **opts)
         if part.type == 'phase-heads':
             if not ui.quiet:
                 _debugphaseheads(ui, part, indent=4)
 
 @command('debugbundle',
     [('a', 'all', None, _('show all details')),
     ('', 'part-type', [], _('show only the named part type')),
     ('', 'spec', None, _('print the bundlespec of the bundle'))],
     _('FILE'),
     norepo=True)
 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
     """lists the contents of a bundle"""
     with hg.openpath(ui, bundlepath) as f:
         if spec:
             spec = exchange.getbundlespec(ui, f)
             ui.write('%s\n' % spec)
             return
 
         gen = exchange.readbundle(ui, f, bundlepath)
         if isinstance(gen, bundle2.unbundle20):
             return _debugbundle2(ui, gen, all=all, **opts)
         _debugchangegroup(ui, gen, all=all, **opts)
 
 @command('debugcapabilities',
     [], _('PATH'),
     norepo=True)
 def debugcapabilities(ui, path, **opts):
     """lists the capabilities of a remote peer"""
     opts = pycompat.byteskwargs(opts)
     peer = hg.peer(ui, opts, path)
     caps = peer.capabilities()
     ui.write(('Main capabilities:\n'))
     for c in sorted(caps):
         ui.write((' %s\n') % c)
     b2caps = bundle2.bundle2caps(peer)
     if b2caps:
         ui.write(('Bundle2 capabilities:\n'))
         for key, values in sorted(b2caps.iteritems()):
             ui.write((' %s\n') % key)
             for v in values:
                 ui.write((' %s\n') % v)
 
 @command('debugcheckstate', [], '')
 def debugcheckstate(ui, repo):
     """validate the correctness of the current dirstate"""
     parent1, parent2 = repo.dirstate.parents()
     m1 = repo[parent1].manifest()
     m2 = repo[parent2].manifest()
     errors = 0
     for f in repo.dirstate:
         state = repo.dirstate[f]
         if state in "nr" and f not in m1:
             ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
             errors += 1
         if state in "a" and f in m1:
             ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
             errors += 1
         if state in "m" and f not in m1 and f not in m2:
             ui.warn(_("%s in state %s, but not in either manifest\n") %
                     (f, state))
             errors += 1
     for f in m1:
         state = repo.dirstate[f]
         if state not in "nrm":
             ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
             errors += 1
     if errors:
         error = _(".hg/dirstate inconsistent with current parent's manifest")
         raise error.Abort(error)
 
 @command('debugcolor',
     [('', 'style', None, _('show all configured styles'))],
     'hg debugcolor')
 def debugcolor(ui, repo, **opts):
     """show available color, effects or style"""
     ui.write(('color mode: %s\n') % ui._colormode)
     if opts.get(r'style'):
         return _debugdisplaystyle(ui)
     else:
         return _debugdisplaycolor(ui)
 
 def _debugdisplaycolor(ui):
     ui = ui.copy()
     ui._styles.clear()
     for effect in color._activeeffects(ui).keys():
         ui._styles[effect] = effect
     if ui._terminfoparams:
         for k, v in ui.configitems('color'):
             if k.startswith('color.'):
                 ui._styles[k] = k[6:]
             elif k.startswith('terminfo.'):
                 ui._styles[k] = k[9:]
     ui.write(_('available colors:\n'))
     # sort label with a '_' after the other to group '_background' entry.
     items = sorted(ui._styles.items(),
                    key=lambda i: ('_' in i[0], i[0], i[1]))
     for colorname, label in items:
         ui.write(('%s\n') % colorname, label=label)
 
 def _debugdisplaystyle(ui):
     ui.write(_('available style:\n'))
     width = max(len(s) for s in ui._styles)
     for label, effects in sorted(ui._styles.items()):
         ui.write('%s' % label, label=label)
         if effects:
             # 50
             ui.write(': ')
             ui.write(' ' * (max(0, width - len(label))))
             ui.write(', '.join(ui.label(e, e) for e in effects.split()))
         ui.write('\n')
 
 @command('debugcreatestreamclonebundle', [], 'FILE')
 def debugcreatestreamclonebundle(ui, repo, fname):
     """create a stream clone bundle file
 
     Stream bundles are special bundles that are essentially archives of
     revlog files. They are commonly used for cloning very quickly.
     """
     # TODO we may want to turn this into an abort when this functionality
     # is moved into `hg bundle`.
     if phases.hassecret(repo):
         ui.warn(_('(warning: stream clone bundle will contain secret '
                   'revisions)\n'))
 
     requirements, gen = streamclone.generatebundlev1(repo)
     changegroup.writechunks(ui, gen, fname)
 
     ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
 
 @command('debugdag',
     [('t', 'tags', None, _('use tags as labels')),
     ('b', 'branches', None, _('annotate with branch names')),
     ('', 'dots', None, _('use dots for runs')),
     ('s', 'spaces', None, _('separate elements by spaces'))],
     _('[OPTION]... [FILE [REV]...]'),
     optionalrepo=True)
 def debugdag(ui, repo, file_=None, *revs, **opts):
     """format the changelog or an index DAG as a concise textual description
 
     If you pass a revlog index, the revlog's DAG is emitted. If you list
     revision numbers, they get labeled in the output as rN.
 
     Otherwise, the changelog DAG of the current repo is emitted.
     """
     spaces = opts.get(r'spaces')
     dots = opts.get(r'dots')
     if file_:
         rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                              file_)
         revs = set((int(r) for r in revs))
         def events():
             for r in rlog:
                 yield 'n', (r, list(p for p in rlog.parentrevs(r)
                                     if p != -1))
                 if r in revs:
                     yield 'l', (r, "r%i" % r)
     elif repo:
         cl = repo.changelog
         tags = opts.get(r'tags')
         branches = opts.get(r'branches')
         if tags:
             labels = {}
             for l, n in repo.tags().items():
                 labels.setdefault(cl.rev(n), []).append(l)
         def events():
             b = "default"
             for r in cl:
                 if branches:
                     newb = cl.read(cl.node(r))[5]['branch']
                     if newb != b:
                         yield 'a', newb
                         b = newb
                 yield 'n', (r, list(p for p in cl.parentrevs(r)
                                     if p != -1))
                 if tags:
                     ls = labels.get(r)
                     if ls:
                         for l in ls:
                             yield 'l', (r, l)
     else:
         raise error.Abort(_('need repo for changelog dag'))
 
     for line in dagparser.dagtextlines(events(),
                                        addspaces=spaces,
                                        wraplabels=True,
                                        wrapannotations=True,
                                        wrapnonlinear=dots,
                                        usedots=dots,
                                        maxlinewidth=70):
         ui.write(line)
         ui.write("\n")
 
 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
 def debugdata(ui, repo, file_, rev=None, **opts):
     """dump the contents of a data file revision"""
     opts = pycompat.byteskwargs(opts)
     if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
         if rev is not None:
             raise error.CommandError('debugdata', _('invalid arguments'))
         file_, rev = None, file_
     elif rev is None:
         raise error.CommandError('debugdata', _('invalid arguments'))
     r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
     try:
         ui.write(r.revision(r.lookup(rev), raw=True))
     except KeyError:
         raise error.Abort(_('invalid revision identifier %s') % rev)
 
 @command('debugdate',
     [('e', 'extended', None, _('try extended date formats'))],
     _('[-e] DATE [RANGE]'),
     norepo=True, optionalrepo=True)
 def debugdate(ui, date, range=None, **opts):
     """parse and display a date"""
     if opts[r"extended"]:
         d = dateutil.parsedate(date, util.extendeddateformats)
     else:
         d = dateutil.parsedate(date)
     ui.write(("internal: %d %d\n") % d)
     ui.write(("standard: %s\n") % dateutil.datestr(d))
     if range:
         m = dateutil.matchdate(range)
         ui.write(("match: %s\n") % m(d[0]))
 
 @command('debugdeltachain',
     cmdutil.debugrevlogopts + cmdutil.formatteropts,
     _('-c|-m|FILE'),
     optionalrepo=True)
 def debugdeltachain(ui, repo, file_=None, **opts):
     """dump information about delta chains in a revlog
 
     Output can be templatized. Available template keywords are:
 
     :``rev``: revision number
     :``chainid``: delta chain identifier (numbered by unique base)
     :``chainlen``: delta chain length to this revision
     :``prevrev``: previous revision in delta chain
     :``deltatype``: role of delta / how it was computed
     :``compsize``: compressed size of revision
     :``uncompsize``: uncompressed size of revision
     :``chainsize``: total size of compressed revisions in chain
     :``chainratio``: total chain size divided by uncompressed revision size
                      (new delta chains typically start at ratio 2.00)
     :``lindist``: linear distance from base revision in delta chain to end
                   of this revision
     :``extradist``: total size of revisions not part of this delta chain from
                     base of delta chain to end of this revision; a measurement
                     of how much extra data we need to read/seek across to read
                     the delta chain for this revision
     :``extraratio``: extradist divided by chainsize; another representation of
                      how much unrelated data is needed to load this delta chain
 
     If the repository is configured to use the sparse read, additional keywords
     are available:
 
     :``readsize``: total size of data read from the disk for a revision
                    (sum of the sizes of all the blocks)
     :``largestblock``: size of the largest block of data read from the disk
     :``readdensity``: density of useful bytes in the data read from the disk
     :``srchunks``: in how many data hunks the whole revision would be read
 
     The sparse read can be enabled with experimental.sparse-read = True
     """
     opts = pycompat.byteskwargs(opts)
     r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
     index = r.index
     generaldelta = r.version & revlog.FLAG_GENERALDELTA
     withsparseread = getattr(r, '_withsparseread', False)
 
     def revinfo(rev):
         e = index[rev]
         compsize = e[1]
         uncompsize = e[2]
         chainsize = 0
 
         if generaldelta:
             if e[3] == e[5]:
                 deltatype = 'p1'
             elif e[3] == e[6]:
                 deltatype = 'p2'
             elif e[3] == rev - 1:
                 deltatype = 'prev'
             elif e[3] == rev:
                 deltatype = 'base'
             else:
                 deltatype = 'other'
         else:
             if e[3] == rev:
                 deltatype = 'base'
             else:
                 deltatype = 'prev'
 
         chain = r._deltachain(rev)[0]
         for iterrev in chain:
             e = index[iterrev]
             chainsize += e[1]
 
         return compsize, uncompsize, deltatype, chain, chainsize
 
     fm = ui.formatter('debugdeltachain', opts)
 
     fm.plain(' rev chain# chainlen prev delta '
              'size rawsize chainsize ratio lindist extradist '
              'extraratio')
     if withsparseread:
         fm.plain(' readsize largestblk rddensity srchunks')
     fm.plain('\n')
 
     chainbases = {}
     for rev in r:
         comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
         chainbase = chain[0]
         chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
         start = r.start
         length = r.length
         basestart = start(chainbase)
         revstart = start(rev)
         lineardist = revstart + comp - basestart
         extradist = lineardist - chainsize
         try:
             prevrev = chain[-2]
         except IndexError:
             prevrev = -1
 
         chainratio = float(chainsize) / float(uncomp)
         extraratio = float(extradist) / float(chainsize)
 
         fm.startitem()
         fm.write('rev chainid chainlen prevrev deltatype compsize '
                  'uncompsize chainsize chainratio lindist extradist '
                  'extraratio',
                  '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
                  rev, chainid, len(chain), prevrev, deltatype, comp,
                  uncomp, chainsize, chainratio, lineardist, extradist,
                  extraratio,
                  rev=rev, chainid=chainid, chainlen=len(chain),
                  prevrev=prevrev, deltatype=deltatype, compsize=comp,
                  uncompsize=uncomp, chainsize=chainsize,
                  chainratio=chainratio, lindist=lineardist,
                  extradist=extradist, extraratio=extraratio)
         if withsparseread:
             readsize = 0
             largestblock = 0
             srchunks = 0
 
             for revschunk in revlog._slicechunk(r, chain):
                 srchunks += 1
                 blkend = start(revschunk[-1]) + length(revschunk[-1])
                 blksize = blkend - start(revschunk[0])
 
                 readsize += blksize
                 if largestblock < blksize:
                     largestblock = blksize
 
             readdensity = float(chainsize) / float(readsize)
 
             fm.write('readsize largestblock readdensity srchunks',
                      ' %10d %10d %9.5f %8d',
                      readsize, largestblock, readdensity, srchunks,
                      readsize=readsize, largestblock=largestblock,
                      readdensity=readdensity, srchunks=srchunks)
 
         fm.plain('\n')
 
     fm.end()
 
729 @command('debugdirstate|debugstate',
730 @command('debugdirstate|debugstate',
730 [('', 'nodates', None, _('do not display the saved mtime')),
731 [('', 'nodates', None, _('do not display the saved mtime')),
731 ('', 'datesort', None, _('sort by saved mtime'))],
732 ('', 'datesort', None, _('sort by saved mtime'))],
732 _('[OPTION]...'))
733 _('[OPTION]...'))
733 def debugstate(ui, repo, **opts):
734 def debugstate(ui, repo, **opts):
734 """show the contents of the current dirstate"""
735 """show the contents of the current dirstate"""
735
736
736 nodates = opts.get(r'nodates')
737 nodates = opts.get(r'nodates')
737 datesort = opts.get(r'datesort')
738 datesort = opts.get(r'datesort')
738
739
739 timestr = ""
740 timestr = ""
740 if datesort:
741 if datesort:
741 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
742 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
742 else:
743 else:
743 keyfunc = None # sort by filename
744 keyfunc = None # sort by filename
744 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
745 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
745 if ent[3] == -1:
746 if ent[3] == -1:
746 timestr = 'unset '
747 timestr = 'unset '
747 elif nodates:
748 elif nodates:
748 timestr = 'set '
749 timestr = 'set '
749 else:
750 else:
750 timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
751 timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
751 time.localtime(ent[3]))
752 time.localtime(ent[3]))
752 timestr = encoding.strtolocal(timestr)
753 timestr = encoding.strtolocal(timestr)
753 if ent[1] & 0o20000:
754 if ent[1] & 0o20000:
754 mode = 'lnk'
755 mode = 'lnk'
755 else:
756 else:
756 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
757 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
757 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
758 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
758 for f in repo.dirstate.copies():
759 for f in repo.dirstate.copies():
759 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
760 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
760
761
761 @command('debugdiscovery',
762 @command('debugdiscovery',
762 [('', 'old', None, _('use old-style discovery')),
763 [('', 'old', None, _('use old-style discovery')),
763 ('', 'nonheads', None,
764 ('', 'nonheads', None,
764 _('use old-style discovery with non-heads included')),
765 _('use old-style discovery with non-heads included')),
765 ('', 'rev', [], 'restrict discovery to this set of revs'),
766 ('', 'rev', [], 'restrict discovery to this set of revs'),
766 ] + cmdutil.remoteopts,
767 ] + cmdutil.remoteopts,
767 _('[--rev REV] [OTHER]'))
768 _('[--rev REV] [OTHER]'))
768 def debugdiscovery(ui, repo, remoteurl="default", **opts):
769 def debugdiscovery(ui, repo, remoteurl="default", **opts):
769 """runs the changeset discovery protocol in isolation"""
770 """runs the changeset discovery protocol in isolation"""
770 opts = pycompat.byteskwargs(opts)
771 opts = pycompat.byteskwargs(opts)
771 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
772 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
772 remote = hg.peer(repo, opts, remoteurl)
773 remote = hg.peer(repo, opts, remoteurl)
773 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
774 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
774
775
775 # make sure tests are repeatable
776 # make sure tests are repeatable
776 random.seed(12323)
777 random.seed(12323)
777
778
778 def doit(pushedrevs, remoteheads, remote=remote):
779 def doit(pushedrevs, remoteheads, remote=remote):
779 if opts.get('old'):
780 if opts.get('old'):
780 if not util.safehasattr(remote, 'branches'):
781 if not util.safehasattr(remote, 'branches'):
781 # enable in-client legacy support
782 # enable in-client legacy support
782 remote = localrepo.locallegacypeer(remote.local())
783 remote = localrepo.locallegacypeer(remote.local())
783 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
784 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
784 force=True)
785 force=True)
785 common = set(common)
786 common = set(common)
786 if not opts.get('nonheads'):
787 if not opts.get('nonheads'):
787 ui.write(("unpruned common: %s\n") %
788 ui.write(("unpruned common: %s\n") %
788 " ".join(sorted(short(n) for n in common)))
789 " ".join(sorted(short(n) for n in common)))
789 dag = dagutil.revlogdag(repo.changelog)
790 dag = dagutil.revlogdag(repo.changelog)
790 all = dag.ancestorset(dag.internalizeall(common))
791 all = dag.ancestorset(dag.internalizeall(common))
791 common = dag.externalizeall(dag.headsetofconnecteds(all))
792 common = dag.externalizeall(dag.headsetofconnecteds(all))
792 else:
793 else:
793 nodes = None
794 nodes = None
794 if pushedrevs:
795 if pushedrevs:
795 revs = scmutil.revrange(repo, pushedrevs)
796 revs = scmutil.revrange(repo, pushedrevs)
796 nodes = [repo[r].node() for r in revs]
797 nodes = [repo[r].node() for r in revs]
797 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
798 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
798 ancestorsof=nodes)
799 ancestorsof=nodes)
799 common = set(common)
800 common = set(common)
800 rheads = set(hds)
801 rheads = set(hds)
801 lheads = set(repo.heads())
802 lheads = set(repo.heads())
802 ui.write(("common heads: %s\n") %
803 ui.write(("common heads: %s\n") %
803 " ".join(sorted(short(n) for n in common)))
804 " ".join(sorted(short(n) for n in common)))
804 if lheads <= common:
805 if lheads <= common:
805 ui.write(("local is subset\n"))
806 ui.write(("local is subset\n"))
806 elif rheads <= common:
807 elif rheads <= common:
807 ui.write(("remote is subset\n"))
808 ui.write(("remote is subset\n"))
808
809
809 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
810 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
810 localrevs = opts['rev']
811 localrevs = opts['rev']
811 doit(localrevs, remoterevs)
812 doit(localrevs, remoterevs)
812
813
813 _chunksize = 4 << 10
814 _chunksize = 4 << 10
814
815
815 @command('debugdownload',
816 @command('debugdownload',
816 [
817 [
817 ('o', 'output', '', _('path')),
818 ('o', 'output', '', _('path')),
818 ],
819 ],
819 optionalrepo=True)
820 optionalrepo=True)
820 def debugdownload(ui, repo, url, output=None, **opts):
821 def debugdownload(ui, repo, url, output=None, **opts):
821 """download a resource using Mercurial logic and config
822 """download a resource using Mercurial logic and config
822 """
823 """
823 fh = urlmod.open(ui, url, output)
824 fh = urlmod.open(ui, url, output)
824
825
825 dest = ui
826 dest = ui
826 if output:
827 if output:
827 dest = open(output, "wb", _chunksize)
828 dest = open(output, "wb", _chunksize)
828 try:
829 try:
829 data = fh.read(_chunksize)
830 data = fh.read(_chunksize)
830 while data:
831 while data:
831 dest.write(data)
832 dest.write(data)
832 data = fh.read(_chunksize)
833 data = fh.read(_chunksize)
833 finally:
834 finally:
834 if output:
835 if output:
835 dest.close()
836 dest.close()
836
837
837 @command('debugextensions', cmdutil.formatteropts, [], norepo=True)
838 @command('debugextensions', cmdutil.formatteropts, [], norepo=True)
838 def debugextensions(ui, **opts):
839 def debugextensions(ui, **opts):
839 '''show information about active extensions'''
840 '''show information about active extensions'''
840 opts = pycompat.byteskwargs(opts)
841 opts = pycompat.byteskwargs(opts)
841 exts = extensions.extensions(ui)
842 exts = extensions.extensions(ui)
842 hgver = util.version()
843 hgver = util.version()
843 fm = ui.formatter('debugextensions', opts)
844 fm = ui.formatter('debugextensions', opts)
844 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
845 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
845 isinternal = extensions.ismoduleinternal(extmod)
846 isinternal = extensions.ismoduleinternal(extmod)
846 extsource = pycompat.fsencode(extmod.__file__)
847 extsource = pycompat.fsencode(extmod.__file__)
847 if isinternal:
848 if isinternal:
848 exttestedwith = [] # never expose magic string to users
849 exttestedwith = [] # never expose magic string to users
849 else:
850 else:
850 exttestedwith = getattr(extmod, 'testedwith', '').split()
851 exttestedwith = getattr(extmod, 'testedwith', '').split()
851 extbuglink = getattr(extmod, 'buglink', None)
852 extbuglink = getattr(extmod, 'buglink', None)
852
853
853 fm.startitem()
854 fm.startitem()
854
855
855 if ui.quiet or ui.verbose:
856 if ui.quiet or ui.verbose:
856 fm.write('name', '%s\n', extname)
857 fm.write('name', '%s\n', extname)
857 else:
858 else:
858 fm.write('name', '%s', extname)
859 fm.write('name', '%s', extname)
859 if isinternal or hgver in exttestedwith:
860 if isinternal or hgver in exttestedwith:
860 fm.plain('\n')
861 fm.plain('\n')
861 elif not exttestedwith:
862 elif not exttestedwith:
862 fm.plain(_(' (untested!)\n'))
863 fm.plain(_(' (untested!)\n'))
863 else:
864 else:
864 lasttestedversion = exttestedwith[-1]
865 lasttestedversion = exttestedwith[-1]
865 fm.plain(' (%s!)\n' % lasttestedversion)
866 fm.plain(' (%s!)\n' % lasttestedversion)
866
867
867 fm.condwrite(ui.verbose and extsource, 'source',
868 fm.condwrite(ui.verbose and extsource, 'source',
868 _(' location: %s\n'), extsource or "")
869 _(' location: %s\n'), extsource or "")
869
870
870 if ui.verbose:
871 if ui.verbose:
871 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
872 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
872 fm.data(bundled=isinternal)
873 fm.data(bundled=isinternal)
873
874
874 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
875 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
875 _(' tested with: %s\n'),
876 _(' tested with: %s\n'),
876 fm.formatlist(exttestedwith, name='ver'))
877 fm.formatlist(exttestedwith, name='ver'))
877
878
878 fm.condwrite(ui.verbose and extbuglink, 'buglink',
879 fm.condwrite(ui.verbose and extbuglink, 'buglink',
879 _(' bug reporting: %s\n'), extbuglink or "")
880 _(' bug reporting: %s\n'), extbuglink or "")
880
881
881 fm.end()
882 fm.end()
882
883
883 @command('debugfileset',
884 @command('debugfileset',
884 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
885 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
885 _('[-r REV] FILESPEC'))
886 _('[-r REV] FILESPEC'))
886 def debugfileset(ui, repo, expr, **opts):
887 def debugfileset(ui, repo, expr, **opts):
887 '''parse and apply a fileset specification'''
888 '''parse and apply a fileset specification'''
888 ctx = scmutil.revsingle(repo, opts.get(r'rev'), None)
889 ctx = scmutil.revsingle(repo, opts.get(r'rev'), None)
889 if ui.verbose:
890 if ui.verbose:
890 tree = fileset.parse(expr)
891 tree = fileset.parse(expr)
891 ui.note(fileset.prettyformat(tree), "\n")
892 ui.note(fileset.prettyformat(tree), "\n")
892
893
893 for f in ctx.getfileset(expr):
894 for f in ctx.getfileset(expr):
894 ui.write("%s\n" % f)
895 ui.write("%s\n" % f)
895
896
@command('debugformat',
    [] + cmdutil.formatteropts,
    _(''))
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len('format-variant'), maxvariantlength)

    def makeformatname(name):
        return '%s:' + (' ' * (maxvariantlength - len(name)))

    fm = ui.formatter('debugformat', opts)
    if fm.isplain():
        def formatvalue(value):
            if util.safehasattr(value, 'startswith'):
                return value
            if value:
                return 'yes'
            else:
                return 'no'
    else:
        formatvalue = pycompat.identity

    fm.plain('format-variant')
    fm.plain(' ' * (maxvariantlength - len('format-variant')))
    fm.plain(' repo')
    if ui.verbose:
        fm.plain(' config default')
    fm.plain('\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        if repovalue != configvalue:
            namelabel = 'formatvariant.name.mismatchconfig'
            repolabel = 'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = 'formatvariant.name.mismatchdefault'
            repolabel = 'formatvariant.repo.mismatchdefault'
        else:
            namelabel = 'formatvariant.name.uptodate'
            repolabel = 'formatvariant.repo.uptodate'

        fm.write('name', makeformatname(fv.name), fv.name,
                 label=namelabel)
        fm.write('repo', ' %3s', formatvalue(repovalue),
                 label=repolabel)
        if fv.default != configvalue:
            configlabel = 'formatvariant.config.special'
        else:
            configlabel = 'formatvariant.config.default'
        fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
                     label=configlabel)
        fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
                     label='formatvariant.default')
        fm.plain('\n')
    fm.end()

@command('debugfsinfo', [], _('[PATH]'), norepo=True)
def debugfsinfo(ui, path="."):
    """show information detected about current filesystem"""
    ui.write(('path: %s\n') % path)
    ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
    ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
    ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
    ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
    ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
    casesensitive = '(unknown)'
    try:
        with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
            casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
    except OSError:
        pass
    ui.write(('case-sensitive: %s\n') % casesensitive)

@command('debuggetbundle',
    [('H', 'head', [], _('id of head node'), _('ID')),
    ('C', 'common', [], _('id of common node'), _('ID')),
    ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
    _('REPO FILE [-H|-C ID]...'),
    norepo=True)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('getbundle'):
        raise error.Abort("getbundle() not supported by target repository")
    args = {}
    if common:
        args[r'common'] = [bin(s) for s in common]
    if head:
        args[r'heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args[r'bundlecaps'] = None
    bundle = repo.getbundle('debug', **args)

    bundletype = opts.get('type', 'bzip2').lower()
    btypes = {'none': 'HG10UN',
              'bzip2': 'HG10BZ',
              'gzip': 'HG10GZ',
              'bundle2': 'HG20'}
    bundletype = btypes.get(bundletype)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_('unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)

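# Illustrative usage (editorial note, not part of the upstream source):
#
#   $ hg debuggetbundle http://example.com/repo bundle.hg \
#         -H <40-hex-head-id> -t bundle2
#
# Every -H/-C value must be a full 40-character hex node id; the resulting
# bundle is written to the given file using the compression chosen by -t.
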
@command('debugignore', [], '[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write("%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != '.':
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    for p in util.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_("%s is ignored\n") % m.uipath(f))
                else:
                    ui.write(_("%s is ignored because of "
                               "containing folder %s\n")
                             % (m.uipath(f), ignored))
                ignorefile, lineno, line = ignoredata
                ui.write(_("(ignore rule in %s, line %d: '%s')\n")
                         % (ignorefile, lineno, line))
            else:
                ui.write(_("%s is not ignored\n") % m.uipath(f))

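# Illustrative usage (editorial note, not part of the upstream source):
#
#   $ hg debugignore                 # print the combined ignore matcher
#   $ hg debugignore build/out.o     # report whether the file is ignored and
#                                    # which ignore rule (file, line) matched
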
@command('debugindex', cmdutil.debugrevlogopts +
    [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
    _('[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True)
def debugindex(ui, repo, file_=None, **opts):
    """dump the contents of an index file"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise error.Abort(_("unknown format %d") % format)

    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    if format == 0:
        if ui.verbose:
            ui.write(("   rev    offset  length linkrev"
                      " %s %s p2\n") % ("nodeid".ljust(idlen),
                                        "p1".ljust(idlen)))
        else:
            ui.write(("   rev linkrev %s %s p2\n") % (
                "nodeid".ljust(idlen), "p1".ljust(idlen)))
    elif format == 1:
        if ui.verbose:
            ui.write(("   rev flag   offset   length     size   link     p1"
                      "     p2 %s\n") % "nodeid".rjust(idlen))
        else:
            ui.write(("   rev flag     size   link     p1     p2 %s\n") %
                     "nodeid".rjust(idlen))

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                pp = [nullid, nullid]
            if ui.verbose:
                ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
                    i, r.start(i), r.length(i), r.linkrev(i),
                    shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
            else:
                ui.write("% 6d % 7d %s %s %s\n" % (
                    i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
                    shortfn(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                    r.linkrev(i), pr[0], pr[1], shortfn(node)))
            else:
                ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
                    shortfn(node)))

@command('debugindexdot', cmdutil.debugrevlogopts,
    _('-c|-m|FILE'), optionalrepo=True)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
    ui.write(("digraph G {\n"))
    for i in r:
        node = r.node(i)
        pp = r.parents(node)
        ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
        if pp[1] != nullid:
            ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
    ui.write("}\n")

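# Illustrative usage (editorial note, not part of the upstream source): the
# "digraph G { ... }" output can be fed straight to Graphviz, e.g.
#
#   $ hg debugindexdot -c | dot -Tpng -o changelog-dag.png
#
# -c/-m select the changelog or manifest revlog, per cmdutil.debugrevlogopts.
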
@command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
def debuginstall(ui, **opts):
    '''test Mercurial installation

    Returns 0 on success.
    '''
    opts = pycompat.byteskwargs(opts)

    def writetemp(contents):
        (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, r"wb")
        f.write(contents)
        f.close()
        return name

    problems = 0

    fm = ui.formatter('debuginstall', opts)
    fm.startitem()

    # encoding
    fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(err, 'encodingerror', _(" %s\n"
                 " (check that your locale is properly set)\n"), err)

    # Python
    fm.write('pythonexe', _("checking Python executable (%s)\n"),
             pycompat.sysexecutable)
    fm.write('pythonver', _("checking Python version (%s)\n"),
             ("%d.%d.%d" % sys.version_info[:3]))
    fm.write('pythonlib', _("checking Python lib (%s)...\n"),
             os.path.dirname(pycompat.fsencode(os.__file__)))

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add('sni')

    fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
             fm.formatlist(sorted(security), name='protocol',
                           fmt='%s', sep=','))

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if 'tls1.2' not in security:
        fm.plain(_('  TLS 1.2 not supported by Python install; '
                   'network connections lack modern security\n'))
    if 'sni' not in security:
        fm.plain(_('  SNI not supported by Python install; may have '
                   'connectivity issues with some servers\n'))

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write('hgver', _("checking Mercurial version (%s)\n"),
             hgver.split('+')[0])
    fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
             '+'.join(hgver.split('+')[1:]))

    # compiled modules
    fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
             policy.policy)
    fm.write('hgmodules', _("checking installed modules (%s)...\n"),
             os.path.dirname(pycompat.fsencode(__file__)))

    if policy.policy in ('c', 'allow'):
        err = None
        try:
            from .cext import (
                base85,
                bdiff,
                mpatch,
                osutil,
            )
            dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, 'extensionserror', " %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write('compengines', _('checking registered compression engines (%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines),
                           name='compengine', fmt='%s', sep=', '))
    fm.write('compenginesavail', _('checking available compression engines '
                                   '(%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines
                                  if e.available()),
                           name='compengine', fmt='%s', sep=', '))
    wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
    fm.write('compenginesserver', _('checking available compression engines '
                                    'for wire protocol (%s)\n'),
             fm.formatlist([e.name() for e in wirecompengines
                            if e.wireprotosupport()],
                           name='compengine', fmt='%s', sep=', '))
    re2 = 'missing'
    if util._re2:
        re2 = 'available'
    fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatepaths()
    fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
    fm.condwrite(not p, '', _(" no template directories found\n"))
    if p:
        m = templater.templatepath("map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                p = None
            fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
        else:
            p = None
        fm.condwrite(p, 'defaulttemplate',
                     _("checking default template (%s)\n"), m)
        fm.condwrite(not m, 'defaulttemplatenotfound',
                     _(" template '%s' not found\n"), "default")
    if not p:
        problems += 1
    fm.condwrite(not p, '',
                 _(" (templates seem to have been installed incorrectly)\n"))

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
                 _(" No commit editor set and can't find %s in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor == 'vi' and editorbin)
    fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
                 _(" Can't find editor '%s' in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editorbin)
    if not cmdpath and editor != 'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = stringutil.forcebytestr(e)
        problems += 1

    fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
    fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
                 " (specify a username in your configuration file)\n"), err)

    fm.condwrite(not problems, '',
                 _("no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(problems, 'problems',
                 _("%d problems detected,"
                   " please check your install!\n"), problems)
    fm.end()

    return problems

@command('debugknown', [], _('REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('known'):
        raise error.Abort("known() not supported by target repository")
    flags = repo.known([bin(s) for s in ids])
    ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))

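# Illustrative usage (editorial note, not part of the upstream source):
#
#   $ hg debugknown ssh://user@host/repo <40-hex-node> <40-hex-node>
#   10
#
# i.e. one "1" or "0" per queried id, in order, telling whether the remote
# repository knows that node.
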
@command('debuglabelcomplete', [], _('LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    debugnamecomplete(ui, repo, *args)

@command('debuglocks',
    [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
    ('W', 'force-wlock', None,
     _('free the working state lock (DANGEROUS)')),
    ('s', 'set-lock', None, _('set the store lock until stopped')),
    ('S', 'set-wlock', None,
     _('set the working state lock until stopped'))],
    _('[OPTION]...'))
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    if opts.get(r'force_lock'):
        repo.svfs.unlink('lock')
    if opts.get(r'force_wlock'):
        repo.vfs.unlink('wlock')
    if opts.get(r'force_lock') or opts.get(r'force_wlock'):
        return 0

    locks = []
    try:
        if opts.get(r'set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_('wlock is already held'))
        if opts.get(r'set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_('lock is already held'))
        if len(locks):
            ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if ":" in locker:
                    host, pid = locker.split(':')
                    if host == socket.gethostname():
                        locker = 'user %s, process %s' % (user, pid)
                    else:
                        locker = 'user %s, process %s, host %s' \
                                 % (user, pid, host)
                ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
                return 1
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise

        ui.write(("%-6s free\n") % (name + ":"))
        return 0

    held += report(repo.svfs, "lock", repo.lock)
    held += report(repo.vfs, "wlock", repo.wlock)

    return held

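# Illustrative usage (editorial note, not part of the upstream source):
#
#   $ hg debuglocks                 # report both locks, e.g.
#   lock:  user alice, process 12345 (3s)
#   wlock: free
#   $ hg debuglocks -L              # forcibly free a stale store lock
#
# The sample output lines above are hypothetical; they simply follow the
# "%-6s %s (%ds)" format used by report() in this command.
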
@command('debugmergestate', [], '')
def debugmergestate(ui, repo, *args):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""
    def _hashornull(h):
        if h == nullhex:
            return 'null'
        else:
            return h

    def printrecords(version):
        ui.write(('* version %d records\n') % version)
        if version == 1:
            records = v1records
        else:
            records = v2records

        for rtype, record in records:
            # pretty print some record types
            if rtype == 'L':
                ui.write(('local: %s\n') % record)
            elif rtype == 'O':
                ui.write(('other: %s\n') % record)
            elif rtype == 'm':
                driver, mdstate = record.split('\0', 1)
                ui.write(('merge driver: %s (state "%s")\n')
                         % (driver, mdstate))
            elif rtype in 'FDC':
                r = record.split('\0')
                f, state, hash, lfile, afile, anode, ofile = r[0:7]
                if version == 1:
                    onode = 'not stored in v1 format'
                    flags = r[7]
                else:
                    onode, flags = r[7:9]
                ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
                         % (f, rtype, state, _hashornull(hash)))
                ui.write(('  local path: %s (flags "%s")\n') % (lfile, flags))
                ui.write(('  ancestor path: %s (node %s)\n')
                         % (afile, _hashornull(anode)))
                ui.write(('  other path: %s (node %s)\n')
                         % (ofile, _hashornull(onode)))
            elif rtype == 'f':
                filename, rawextras = record.split('\0', 1)
                extras = rawextras.split('\0')
                i = 0
                extrastrings = []
                while i < len(extras):
                    extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
                    i += 2

                ui.write(('file extras: %s (%s)\n')
                         % (filename, ', '.join(extrastrings)))
            elif rtype == 'l':
                labels = record.split('\0', 2)
                labels = [l for l in labels if len(l) > 0]
                ui.write(('labels:\n'))
                ui.write(('  local: %s\n' % labels[0]))
                ui.write(('  other: %s\n' % labels[1]))
                if len(labels) > 2:
                    ui.write(('  base: %s\n' % labels[2]))
            else:
                ui.write(('unrecognized entry: %s\t%s\n')
                         % (rtype, record.replace('\0', '\t')))

    # Avoid mergestate.read() since it may raise an exception for unsupported
    # merge state records. We shouldn't be doing this, but this is OK since this
    # command is pretty low-level.
    ms = mergemod.mergestate(repo)

    # sort so that reasonable information is on top
    v1records = ms._readrecordsv1()
    v2records = ms._readrecordsv2()
    order = 'LOml'
    def key(r):
        idx = order.find(r[0])
        if idx == -1:
            return (1, r[1])
        else:
            return (0, idx)
    v1records.sort(key=key)
    v2records.sort(key=key)

    if not v1records and not v2records:
        ui.write(('no merge state found\n'))
    elif not v2records:
        ui.note(('no version 2 merge state\n'))
        printrecords(1)
    elif ms._v1v2match(v1records, v2records):
        ui.note(('v1 and v2 states match: using v2\n'))
        printrecords(2)
    else:
        ui.note(('v1 and v2 states mismatch: using v1\n'))
        printrecords(1)
        if ui.verbose:
            printrecords(2)

@command('debugnamecomplete', [], _('NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    names = set()
    # since we previously only listed open branches, we will handle that
    # specially (after this for loop)
    for name, ns in repo.names.iteritems():
        if name != 'branches':
            names.update(ns.listnames(repo))
    names.update(tag for (tag, heads, tip, closed)
                 in repo.branchmap().iterbranches() if not closed)
    completions = set()
    if not args:
        args = ['']
    for a in args:
        completions.update(n for n in names if n.startswith(a))
    ui.write('\n'.join(sorted(completions)))
    ui.write('\n')

@command('debugobsolete',
    [('', 'flags', 0, _('markers flag')),
    ('', 'record-parents', False,
     _('record parent information for the precursor')),
    ('r', 'rev', [], _('display markers relevant to REV')),
    ('', 'exclusive', False, _('restrict display to markers only '
                               'relevant to REV')),
    ('', 'index', False, _('display index of the marker')),
    ('', 'delete', [], _('delete markers specified by indices')),
    ] + cmdutil.commitopts2 + cmdutil.formatteropts,
    _('[OBSOLETED [REPLACEMENT ...]]'))
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.Abort('changeset references must be full hexadecimal '
                              'node identifiers')

    if opts.get('delete'):
        indices = []
        for v in opts.get('delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.Abort(_('invalid index value: %r') % v,
                                  hint=_('use integers for indices'))

        if repo.currenttransaction():
            raise error.Abort(_('cannot delete obsmarkers in the middle '
                                'of transaction.'))

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_('deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        if opts['rev']:
            raise error.Abort('cannot select revision when creating marker')
        metadata = {}
        metadata['user'] = opts['user'] or ui.username()
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction('debugobsolete')
            try:
                date = opts.get('date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts['record_parents']:
                    if prec not in repo.unfiltered():
                        raise error.Abort('cannot used --record-parents on '
                                          'unknown changesets')
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(tr, prec, succs, opts['flags'],
                                     parents=parents, date=date,
                                     metadata=metadata, ui=ui)
                tr.close()
            except ValueError as exc:
                raise error.Abort(_('bad obsmarker input: %s') %
                                  pycompat.bytestr(exc))
            finally:
                tr.release()
        finally:
            l.release()
    else:
        if opts['rev']:
            revs = scmutil.revrange(repo, opts['rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(obsutil.getmarkers(repo, nodes=nodes,
                                              exclusive=opts['exclusive']))
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get('rev') and opts.get('index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter('debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get('index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()

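# Illustrative usage (editorial note, not part of the upstream source):
#
#   $ hg debugobsolete --index               # list markers with their indices
#   $ hg debugobsolete --delete 0            # then delete marker 0 by index
#   $ hg debugobsolete --rev . --exclusive   # markers relevant only to "."
#
# Creating a marker requires full hex node ids, as enforced by parsenodeid().
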
@command('debugpathcomplete',
    [('f', 'full', None, _('complete an entire path')),
    ('n', 'normal', None, _('show only normal files')),
    ('a', 'added', None, _('show only added files')),
    ('r', 'removed', None, _('show only removed files'))],
    _('FILESPEC...'))
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += '/'
        spec = spec[len(rootdir):]
        fixpaths = pycompat.ossep != '/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, '/')
        speclen = len(spec)
        fullpaths = opts[r'full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.iteritems():
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace('/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    acceptable = ''
    if opts[r'normal']:
        acceptable += 'nm'
    if opts[r'added']:
        acceptable += 'a'
    if opts[r'removed']:
        acceptable += 'r'
    cwd = repo.getcwd()
    if not specs:
        specs = ['.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or 'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write('\n')

@command('debugpeer', [], _('PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    overrides = {
        ('devel', 'debug.peer-request'): True,
    }

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

        local = peer.local() is not None
        canpush = peer.canpush()

        ui.write(_('url: %s\n') % peer.url())
        ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
        ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))

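# Illustrative run of debugpeer above (the URL and answers are made up; the
# actual values depend on the peer being contacted):
#
#   $ hg debugpeer https://example.com/repo
#   url: https://example.com/repo
#   local: no
#   pushable: yes
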
@command('debugpickmergetool',
        [('r', 'rev', '', _('check for files in this revision'), _('REV')),
         ('', 'changedelete', None, _('emulate merging change and delete')),
        ] + cmdutil.walkopts + cmdutil.mergetoolopts,
        _('[PATTERN]...'),
        inferrepo=True)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for the specified file

    As described in :hg:`help merge-tools`, Mercurial examines the
    configurations below, in this order, to decide which merge tool is
    chosen for the specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reasons only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out the examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actually updating to it.

    With --debug, this command also shows warning messages while matching
    against ``merge-patterns`` and so on. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases the amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows the configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If the merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such a case, the information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts['tool']:
        overrides[('ui', 'forcemerge')] = opts['tool']
        ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))

    with ui.configoverride(overrides, 'debugmergepatterns'):
        hgmerge = encoding.environ.get("HGMERGE")
        if hgmerge is not None:
            ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config("ui", "merge")
        if uimerge:
            ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get('rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts['changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                if not ui.debugflag:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(repo, ui, path,
                                                     fctx.isbinary(),
                                                     'l' in fctx.flags(),
                                                     changedelete)
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(('%s = %s\n') % (path, tool))

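# Illustrative output of debugpickmergetool above (paths and tools are made
# up; the actual tool depends on the configuration order listed in the
# docstring):
#
#   $ hg debugpickmergetool
#   src/main.c = :merge
#   docs/guide.txt = vimdiff
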
@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    target = hg.peer(ui, {}, repopath)
    if keyinfo:
        key, old, new = keyinfo
        with target.commandexecutor() as e:
            r = e.callcommand('pushkey', {
                'namespace': namespace,
                'key': key,
                'old': old,
                'new': new,
            }).result()

        ui.status(pycompat.bytestr(r) + '\n')
        return not r
    else:
        for k, v in sorted(target.listkeys(namespace).iteritems()):
            ui.write("%s\t%s\n" % (stringutil.escapestr(k),
                                   stringutil.escapestr(v)))

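# Illustrative uses of debugpushkey above (repository path and values are
# made up). With two arguments the keys of a namespace are listed; with five
# arguments a key is moved from an old value to a new one:
#
#   $ hg debugpushkey /path/to/repo bookmarks
#   $ hg debugpushkey /path/to/repo bookmarks mybook <oldnode> <newnode>
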
@command('debugpvec', [], _('A B'))
def debugpvec(ui, repo, a, b=None):
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = "="
    elif pa > pb:
        rel = ">"
    elif pa < pb:
        rel = "<"
    elif pa | pb:
        rel = "|"
    ui.write(_("a: %s\n") % pa)
    ui.write(_("b: %s\n") % pb)
    ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
             (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
              pa.distance(pb), rel))

@command('debugrebuilddirstate|debugrebuildstate',
    [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
     ('', 'minimal', None, _('only rebuild files that are inconsistent with '
                             'the working copy parent')),
    ],
    _('[-r REV]'))
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to
    be tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get(r'minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            manifestonly = manifestfiles - dirstatefiles
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)

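# Note on --minimal above: the set handed to dirstate.rebuild() is
#   (files only in the manifest) | (files only in the dirstate and not 'a'),
# i.e. entries whose recorded status cannot be trusted; everything else is
# left untouched.
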
@command('debugrebuildfncache', [], '')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    repair.rebuildfncache(ui, repo)

@command('debugrename',
    [('r', 'rev', '', _('revision to debug'), _('REV'))],
    _('[-r REV] FILE'))
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'))
    m = scmutil.match(ctx, (file1,) + pats, opts)
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        o = fctx.filelog().renamed(fctx.filenode())
        rel = m.rel(abs)
        if o:
            ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
        else:
            ui.write(_("%s not renamed\n") % rel)

@command('debugrevlog', cmdutil.debugrevlogopts +
    [('d', 'dump', False, _('dump index data'))],
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)

    if opts.get("dump"):
        numrevs = len(r)
        ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
                  " rawsize totalsize compression heads chainlen\n"))
        ts = 0
        heads = set()

        for rev in xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                     "%11d %5d %8d\n" %
                     (rev, p1, p2, r.start(rev), r.end(rev),
                      r.start(dbase), r.start(cbase),
                      r.start(p1), r.start(p2),
                      rs, ts, compression, len(heads), clen))
        return 0

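    # Note on the --dump loop above: 'totalsize' and the 'compression' column
    # are cumulative up to the current revision (ts / r.end(rev)), not
    # per-revision values, and 'heads' counts the heads of the revlog graph
    # seen so far.
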
    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append('inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append('generaldelta')
    if not flags:
        flags = ['(none)']

    nummerges = 0
    numfull = 0
    numprev = 0
    nump1 = 0
    nump2 = 0
    numother = 0
    nump1prev = 0
    nump2prev = 0
    chainlengths = []
    chainbases = []
    chainspans = []

    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

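    # Note: each of datasize/fullsize/deltasize above is kept as a
    # [min, max, total] triple updated by addsize(); the single pass below
    # classifies every revision's delta base (full snapshot, previous rev,
    # p1, p2, or other) and records chain and chunk statistics.
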
    numrevs = len(r)
    for rev in xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            numfull += 1
            addsize(size, fullsize)
        else:
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            addsize(size, deltasize)
            if delta == rev - 1:
                numprev += 1
                if delta == p1:
                    nump1prev += 1
                elif delta == p2:
                    nump2prev += 1
            elif delta == p1:
                nump1 += 1
            elif delta == p2:
                nump2 += 1
            elif delta != nullrev:
                numother += 1

        # Obtain data on the raw chunks in the revlog.
        segment = r._getsegmentforrevs(rev, rev)[1]
        if segment:
            chunktype = bytes(segment[0:1])
        else:
            chunktype = 'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    fullsize[2] /= numfull
    deltatotal = deltasize[2]
    if numrevs - numfull > 0:
        deltasize[2] /= numrevs - numfull
    totalsize = fulltotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    basedfmtstr = '%%%dd\n'
    basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        return basedfmtstr % len(str(max))
    def pcfmtstr(max, padding=0):
        return basepcfmtstr % (len(str(max)), ' ' * padding)

    def pcfmt(value, total):
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.write(('format : %d\n') % format)
    ui.write(('flags : %s\n') % ', '.join(flags))

    ui.write('\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
    ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
    ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
    ui.write(('revision size : ') + fmt2 % totalsize)
    ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
    ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        if chunktype == 'empty':
            return ' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return ' 0x%s : ' % hex(chunktype)

    ui.write('\n')
    ui.write(('chunks : ') + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.write(('chunks size : ') + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write('\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.write(('avg chain length : ') + fmt % avgchainlen)
    ui.write(('max chain length : ') + fmt % maxchainlen)
    ui.write(('max chain reach : ') + fmt % maxchainspan)
    ui.write(('compression ratio : ') + fmt % compratio)

    if format > 0:
        ui.write('\n')
        ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
                 % tuple(datasize))
        ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
                 % tuple(fullsize))
        ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
                 % tuple(deltasize))

    if numdeltas > 0:
        ui.write('\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
        if numprev > 0:
            ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
                                                            numprev))
            ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
                                                            numprev))
            ui.write((' other : ') + fmt2 % pcfmt(numoprev, numprev))
        if gdelta:
            ui.write(('deltas against p1 : ')
                     + fmt % pcfmt(nump1, numdeltas))
            ui.write(('deltas against p2 : ')
                     + fmt % pcfmt(nump2, numdeltas))
            ui.write(('deltas against other : ') + fmt % pcfmt(numother,
                                                               numdeltas))

@command('debugrevspec',
    [('', 'optimize', None,
      _('print parsed tree after optimizing (DEPRECATED)')),
     ('', 'show-revs', True, _('print list of result revisions (default)')),
     ('s', 'show-set', None, _('print internal representation of result set')),
     ('p', 'show-stage', [],
      _('print parsed tree at the given stage'), _('NAME')),
     ('', 'no-optimized', False, _('evaluate tree without optimization')),
     ('', 'verify-optimized', False, _('verify optimized result')),
     ],
    ('REVSPEC'))
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems('revsetalias')
    stages = [
        ('parsed', lambda tree: tree),
        ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
                                                           ui.warn)),
        ('concatenated', revsetlang.foldconcat),
        ('analyzed', revsetlang.analyze),
        ('optimized', revsetlang.optimize),
    ]
    if opts['no_optimized']:
        stages = stages[:-1]
    if opts['verify_optimized'] and opts['no_optimized']:
        raise error.Abort(_('cannot use --verify-optimized with '
                            '--no-optimized'))
    stagenames = set(n for n, f in stages)

    showalways = set()
    showchanged = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
        showchanged.update(['expanded', 'concatenated'])
    if opts['optimize']:
        showalways.add('optimized')
    if opts['show_stage'] and opts['optimize']:
        raise error.Abort(_('cannot use --optimize with --show-stage'))
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(revsetlang.prettyformat(tree), "\n")
            printedtree = tree

    if opts['verify_optimized']:
        arevs = revset.makematcher(treebystage['analyzed'])(repo)
        brevs = revset.makematcher(treebystage['optimized'])(repo)
        if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
            ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
            ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        ui.write(('--- analyzed\n'), label='diff.file_a')
        ui.write(('+++ optimized\n'), label='diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write('-%s\n' % c, label='diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write('+%s\n' % c, label='diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(' %s\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
        ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
    if not opts['show_revs']:
        return
    for c in revs:
        ui.write("%d\n" % c)

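# Illustrative use of debugrevspec above (the exact tree output depends on the
# expression and Mercurial version):
#
#   $ hg debugrevspec -p all --no-show-revs 'ancestors(.) and public()'
#
# prints the parsed/expanded/concatenated/analyzed/optimized trees for the
# expression without listing the matching revisions.
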
@command('debugserve', [
    ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
    ('', 'logiofd', '', _('file descriptor to log server I/O to')),
    ('', 'logiofile', '', _('file to log server I/O to')),
], '')
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts['sshstdio']:
        raise error.Abort(_('only --sshstdio is currently supported'))

    logfh = None

    if opts['logiofd'] and opts['logiofile']:
        raise error.Abort(_('cannot use both --logiofd and --logiofile'))

    if opts['logiofd']:
        # Line buffered because output is line based.
        logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
    elif opts['logiofile']:
        logfh = open(opts['logiofile'], 'ab', 1)

    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()

@command('debugsetparents', [], _('REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """

    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, 'null').node()

    with repo.wlock():
        repo.setparents(node1, node2)

@command('debugssl', [], '[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    '''test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    '''
    if not pycompat.iswindows:
        raise error.Abort(_('certificate chain building is only possible on '
                            'Windows'))

    if not source:
        if not repo:
            raise error.Abort(_("there is no Mercurial repository here, and no "
                                "server specified"))
        source = "default"

    source, branches = hg.parseurl(ui.expandpath(source))
    url = util.url(source)
    addr = None

    defaultport = {'https': 443, 'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_("malformed port number in URL"))
    else:
        raise error.Abort(_("only https and ssh connections are supported"))

    from . import win32

    s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
                        cert_reqs=ssl.CERT_NONE, ca_certs=None)

    try:
        s.connect(addr)
        cert = s.getpeercert(True)

        ui.status(_('checking the certificate chain for %s\n') % url.host)

        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_('certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_('failed.\n'))
            else:
                ui.status(_('done.\n'))
        else:
            ui.status(_('full certificate chain is available\n'))
    finally:
        s.close()

@command('debugsub',
    [('r', 'rev', '',
      _('revision to check'), _('REV'))],
    _('[-r REV] [REV]'))
def debugsub(ui, repo, rev=None):
    ctx = scmutil.revsingle(repo, rev, None)
    for k, v in sorted(ctx.substate.items()):
        ui.write(('path %s\n') % k)
        ui.write((' source %s\n') % v[0])
        ui.write((' revision %s\n') % v[1])

@command('debugsuccessorssets',
    [('', 'closest', False, _('return closest successors sets only'))],
    _('[REV]'))
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains only non-obsolete changesets unless the
    ``--closest`` option is used.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors is called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    ctx2str = bytes
    node2str = short
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write('%s\n'% ctx2str(ctx))
        for succsset in obsutil.successorssets(repo, ctx.node(),
                                               closest=opts[r'closest'],
                                               cache=cache):
            if succsset:
                ui.write(' ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(' ')
                    ui.write(node2str(node))
            ui.write('\n')

@command('debugtemplate',
    [('r', 'rev', [], _('apply template on changesets'), _('REV')),
     ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
    _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts[r'rev']:
        if repo is None:
            raise error.RepoError(_('there is no Mercurial repository here '
                                    '(.hg not found)'))
        revs = scmutil.revrange(repo, opts[r'rev'])

    props = {}
    for d in opts[r'define']:
        try:
            k, v = (e.strip() for e in d.split('=', 1))
            if not k or k == 'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_('malformed keyword definition: %s') % d)

    if ui.verbose:
        aliases = ui.configitems('templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), '\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')

    if revs is None:
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        ui.write(t.renderdefault(props))
    else:
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()

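# Illustrative uses of debugtemplate above (output values are made up):
#
#   $ hg debugtemplate -r . '{rev}:{node|short}\n'
#   42:0123456789ab
#
#   $ hg debugtemplate -D greeting=hello '{greeting} world\n'
#   hello world
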
@command('debuguigetpass', [
    ('p', 'prompt', '', _('prompt text'), _('TEXT')),
], _('[-p TEXT]'), norepo=True)
def debuguigetpass(ui, prompt=''):
    """show prompt to type password"""
    r = ui.getpass(prompt)
    ui.write(('response: %s\n') % r)

@command('debuguiprompt', [
    ('p', 'prompt', '', _('prompt text'), _('TEXT')),
], _('[-p TEXT]'), norepo=True)
def debuguiprompt(ui, prompt=''):
    """show plain prompt"""
    r = ui.prompt(prompt)
    ui.write(('response: %s\n') % r)

@command('debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    with repo.wlock(), repo.lock():
        repo.updatecaches(full=True)

@command('debugupgraderepo', [
    ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
    ('', 'run', False, _('performs an upgrade')),
])
def debugupgraderepo(ui, repo, run=False, optimize=None):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines,
    this should complete almost instantaneously and the chances of a consumer
    being unable to access the repository should be low.
    """
    return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)

@command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
    inferrepo=True)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    ui.write(('matcher: %r\n' % m))
    items = list(repo[None].walk(m))
    if not items:
        return
    f = lambda fn: fn
    if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
        f = lambda fn: util.normpath(fn)
    fmt = 'f %%-%ds %%-%ds %%s' % (
        max([len(abs) for abs in items]),
        max([len(m.rel(abs)) for abs in items]))
    for abs in items:
        line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
        ui.write("%s\n" % line.rstrip())

2557 @command('debugwhyunstable', [], _('REV'))
2558 @command('debugwhyunstable', [], _('REV'))
2558 def debugwhyunstable(ui, repo, rev):
2559 def debugwhyunstable(ui, repo, rev):
2559 """explain instabilities of a changeset"""
2560 """explain instabilities of a changeset"""
2560 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
2561 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
2561 dnodes = ''
2562 dnodes = ''
2562 if entry.get('divergentnodes'):
2563 if entry.get('divergentnodes'):
2563 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2564 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2564 for ctx in entry['divergentnodes']) + ' '
2565 for ctx in entry['divergentnodes']) + ' '
2565 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2566 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2566 entry['reason'], entry['node']))
2567 entry['reason'], entry['node']))
2567
2568
2568 @command('debugwireargs',
2569 @command('debugwireargs',
2569 [('', 'three', '', 'three'),
2570 [('', 'three', '', 'three'),
2570 ('', 'four', '', 'four'),
2571 ('', 'four', '', 'four'),
2571 ('', 'five', '', 'five'),
2572 ('', 'five', '', 'five'),
2572 ] + cmdutil.remoteopts,
2573 ] + cmdutil.remoteopts,
2573 _('REPO [OPTIONS]... [ONE [TWO]]'),
2574 _('REPO [OPTIONS]... [ONE [TWO]]'),
2574 norepo=True)
2575 norepo=True)
2575 def debugwireargs(ui, repopath, *vals, **opts):
2576 def debugwireargs(ui, repopath, *vals, **opts):
2576 opts = pycompat.byteskwargs(opts)
2577 opts = pycompat.byteskwargs(opts)
2577 repo = hg.peer(ui, opts, repopath)
2578 repo = hg.peer(ui, opts, repopath)
2578 for opt in cmdutil.remoteopts:
2579 for opt in cmdutil.remoteopts:
2579 del opts[opt[1]]
2580 del opts[opt[1]]
2580 args = {}
2581 args = {}
2581 for k, v in opts.iteritems():
2582 for k, v in opts.iteritems():
2582 if v:
2583 if v:
2583 args[k] = v
2584 args[k] = v
2584 args = pycompat.strkwargs(args)
2585 args = pycompat.strkwargs(args)
2585 # run twice to check that we don't mess up the stream for the next command
2586 # run twice to check that we don't mess up the stream for the next command
2586 res1 = repo.debugwireargs(*vals, **args)
2587 res1 = repo.debugwireargs(*vals, **args)
2587 res2 = repo.debugwireargs(*vals, **args)
2588 res2 = repo.debugwireargs(*vals, **args)
2588 ui.write("%s\n" % res1)
2589 ui.write("%s\n" % res1)
2589 if res1 != res2:
2590 if res1 != res2:
2590 ui.warn("%s\n" % res2)
2591 ui.warn("%s\n" % res2)
2591
2592
2592 def _parsewirelangblocks(fh):
2593 def _parsewirelangblocks(fh):
2593 activeaction = None
2594 activeaction = None
2594 blocklines = []
2595 blocklines = []
2595
2596
2596 for line in fh:
2597 for line in fh:
2597 line = line.rstrip()
2598 line = line.rstrip()
2598 if not line:
2599 if not line:
2599 continue
2600 continue
2600
2601
2601 if line.startswith(b'#'):
2602 if line.startswith(b'#'):
2602 continue
2603 continue
2603
2604
2604 if not line.startswith(' '):
2605 if not line.startswith(' '):
2605 # New block. Flush previous one.
2606 # New block. Flush previous one.
2606 if activeaction:
2607 if activeaction:
2607 yield activeaction, blocklines
2608 yield activeaction, blocklines
2608
2609
2609 activeaction = line
2610 activeaction = line
2610 blocklines = []
2611 blocklines = []
2611 continue
2612 continue
2612
2613
2613 # Else we start with an indent.
2614 # Else we start with an indent.
2614
2615
2615 if not activeaction:
2616 if not activeaction:
2616 raise error.Abort(_('indented line outside of block'))
2617 raise error.Abort(_('indented line outside of block'))
2617
2618
2618 blocklines.append(line)
2619 blocklines.append(line)
2619
2620
2620 # Flush last block.
2621 # Flush last block.
2621 if activeaction:
2622 if activeaction:
2622 yield activeaction, blocklines
2623 yield activeaction, blocklines
2623
2624
2624 @command('debugwireproto',
2625 @command('debugwireproto',
2625 [
2626 [
2626 ('', 'localssh', False, _('start an SSH server for this repo')),
2627 ('', 'localssh', False, _('start an SSH server for this repo')),
2627 ('', 'peer', '', _('construct a specific version of the peer')),
2628 ('', 'peer', '', _('construct a specific version of the peer')),
2628 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2629 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2629 ('', 'nologhandshake', False,
2630 ('', 'nologhandshake', False,
2630 _('do not log I/O related to the peer handshake')),
2631 _('do not log I/O related to the peer handshake')),
2631 ] + cmdutil.remoteopts,
2632 ] + cmdutil.remoteopts,
2632 _('[PATH]'),
2633 _('[PATH]'),
2633 optionalrepo=True)
2634 optionalrepo=True)
2634 def debugwireproto(ui, repo, path=None, **opts):
2635 def debugwireproto(ui, repo, path=None, **opts):
2635 """send wire protocol commands to a server
2636 """send wire protocol commands to a server
2636
2637
2637 This command can be used to issue wire protocol commands to remote
2638 This command can be used to issue wire protocol commands to remote
2638 peers and to debug the raw data being exchanged.
2639 peers and to debug the raw data being exchanged.
2639
2640
2640 ``--localssh`` will start an SSH server against the current repository
2641 ``--localssh`` will start an SSH server against the current repository
2641 and connect to that. By default, the connection will perform a handshake
2642 and connect to that. By default, the connection will perform a handshake
2642 and establish an appropriate peer instance.
2643 and establish an appropriate peer instance.
2643
2644
2644 ``--peer`` can be used to bypass the handshake protocol and construct a
2645 ``--peer`` can be used to bypass the handshake protocol and construct a
2645 peer instance using the specified class type. Valid values are ``raw``,
2646 peer instance using the specified class type. Valid values are ``raw``,
2646 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2647 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2647 raw data payloads and don't support higher-level command actions.
2648 raw data payloads and don't support higher-level command actions.
2648
2649
2649 ``--noreadstderr`` can be used to disable automatic reading from stderr
2650 ``--noreadstderr`` can be used to disable automatic reading from stderr
2650 of the peer (for SSH connections only). Disabling automatic reading of
2651 of the peer (for SSH connections only). Disabling automatic reading of
2651 stderr is useful for making output more deterministic.
2652 stderr is useful for making output more deterministic.
2652
2653
2653 Commands are issued via a mini-language which is read from stdin.
2654 Commands are issued via a mini-language which is read from stdin.
2654 The language consists of individual actions to perform. An action is
2655 The language consists of individual actions to perform. An action is
2655 defined by a block. A block is defined as a line with no leading
2656 defined by a block. A block is defined as a line with no leading
2656 space followed by 0 or more lines with leading space. Blocks are
2657 space followed by 0 or more lines with leading space. Blocks are
2657 effectively a high-level command with additional metadata.
2658 effectively a high-level command with additional metadata.
2658
2659
2659 Lines beginning with ``#`` are ignored.
2660 Lines beginning with ``#`` are ignored.
2660
2661
2661 The following sections denote available actions.
2662 The following sections denote available actions.
2662
2663
2663 raw
2664 raw
2664 ---
2665 ---
2665
2666
2666 Send raw data to the server.
2667 Send raw data to the server.
2667
2668
2668 The block payload contains the raw data to send as one atomic send
2669 The block payload contains the raw data to send as one atomic send
2669 operation. The data may not actually be delivered in a single system
2670 operation. The data may not actually be delivered in a single system
2670 call: it depends on the abilities of the transport being used.
2671 call: it depends on the abilities of the transport being used.
2671
2672
2672 Each line in the block is de-indented and concatenated. Then, that
2673 Each line in the block is de-indented and concatenated. Then, that
2673 value is evaluated as a Python b'' literal. This allows the use of
2674 value is evaluated as a Python b'' literal. This allows the use of
2674 backslash escaping, etc.
2675 backslash escaping, etc.
2675
2676
2676 raw+
2677 raw+
2677 ----
2678 ----
2678
2679
2679 Behaves like ``raw`` except flushes output afterwards.
2680 Behaves like ``raw`` except flushes output afterwards.
2680
2681
2681 command <X>
2682 command <X>
2682 -----------
2683 -----------
2683
2684
2684 Send a request to run a named command, whose name follows the ``command``
2685 Send a request to run a named command, whose name follows the ``command``
2685 string.
2686 string.
2686
2687
2687 Arguments to the command are defined as lines in this block. The format of
2688 Arguments to the command are defined as lines in this block. The format of
2688 each line is ``<key> <value>``. e.g.::
2689 each line is ``<key> <value>``. e.g.::
2689
2690
2690 command listkeys
2691 command listkeys
2691 namespace bookmarks
2692 namespace bookmarks
2692
2693
2693 If the value begins with ``eval:``, it will be interpreted as a Python
2694 If the value begins with ``eval:``, it will be interpreted as a Python
2694 literal expression. Otherwise values are interpreted as Python b'' literals.
2695 literal expression. Otherwise values are interpreted as Python b'' literals.
2695 This allows sending complex types and encoding special byte sequences via
2696 This allows sending complex types and encoding special byte sequences via
2696 backslash escaping.
2697 backslash escaping.
2697
2698
2698 The following arguments have special meaning:
2699 The following arguments have special meaning:
2699
2700
2700 ``PUSHFILE``
2701 ``PUSHFILE``
2701 When defined, the *push* mechanism of the peer will be used instead
2702 When defined, the *push* mechanism of the peer will be used instead
2702 of the static request-response mechanism and the content of the
2703 of the static request-response mechanism and the content of the
2703 file specified in the value of this argument will be sent as the
2704 file specified in the value of this argument will be sent as the
2704 command payload.
2705 command payload.
2705
2706
2706 This can be used to submit a local bundle file to the remote.
2707 This can be used to submit a local bundle file to the remote.
2707
2708
2708 batchbegin
2709 batchbegin
2709 ----------
2710 ----------
2710
2711
2711 Instruct the peer to begin a batched send.
2712 Instruct the peer to begin a batched send.
2712
2713
2713 All ``command`` blocks are queued for execution until the next
2714 All ``command`` blocks are queued for execution until the next
2714 ``batchsubmit`` block.
2715 ``batchsubmit`` block.
2715
2716
2716 batchsubmit
2717 batchsubmit
2717 -----------
2718 -----------
2718
2719
2719 Submit previously queued ``command`` blocks as a batch request.
2720 Submit previously queued ``command`` blocks as a batch request.
2720
2721
2721 This action MUST be paired with a ``batchbegin`` action.
2722 This action MUST be paired with a ``batchbegin`` action.
2722
2723
2723 httprequest <method> <path>
2724 httprequest <method> <path>
2724 ---------------------------
2725 ---------------------------
2725
2726
2726 (HTTP peer only)
2727 (HTTP peer only)
2727
2728
2728 Send an HTTP request to the peer.
2729 Send an HTTP request to the peer.
2729
2730
2730 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
2731 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
2731
2732
2732 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
2733 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
2733 headers to add to the request. e.g. ``Accept: foo``.
2734 headers to add to the request. e.g. ``Accept: foo``.
2734
2735
2735 The following arguments are special:
2736 The following arguments are special:
2736
2737
2737 ``BODYFILE``
2738 ``BODYFILE``
2738 The content of the file defined as the value to this argument will be
2739 The content of the file defined as the value to this argument will be
2739 transferred verbatim as the HTTP request body.
2740 transferred verbatim as the HTTP request body.
2740
2741
2741 ``frame <type> <flags> <payload>``
2742 ``frame <type> <flags> <payload>``
2742 Send a unified protocol frame as part of the request body.
2743 Send a unified protocol frame as part of the request body.
2743
2744
2744 All frames will be collected and sent as the body to the HTTP
2745 All frames will be collected and sent as the body to the HTTP
2745 request.
2746 request.
2746
2747
2747 close
2748 close
2748 -----
2749 -----
2749
2750
2750 Close the connection to the server.
2751 Close the connection to the server.
2751
2752
2752 flush
2753 flush
2753 -----
2754 -----
2754
2755
2755 Flush data written to the server.
2756 Flush data written to the server.
2756
2757
2757 readavailable
2758 readavailable
2758 -------------
2759 -------------
2759
2760
2760 Close the write end of the connection and read all available data from
2761 Close the write end of the connection and read all available data from
2761 the server.
2762 the server.
2762
2763
2763 If the connection to the server encompasses multiple pipes, we poll both
2764 If the connection to the server encompasses multiple pipes, we poll both
2764 pipes and read available data.
2765 pipes and read available data.
2765
2766
2766 readline
2767 readline
2767 --------
2768 --------
2768
2769
2769 Read a line of output from the server. If there are multiple output
2770 Read a line of output from the server. If there are multiple output
2770 pipes, reads only the main pipe.
2771 pipes, reads only the main pipe.
2771
2772
2772 ereadline
2773 ereadline
2773 ---------
2774 ---------
2774
2775
2775 Like ``readline``, but read from the stderr pipe, if available.
2776 Like ``readline``, but read from the stderr pipe, if available.
2776
2777
2777 read <X>
2778 read <X>
2778 --------
2779 --------
2779
2780
2780 ``read()`` N bytes from the server's main output pipe.
2781 ``read()`` N bytes from the server's main output pipe.
2781
2782
2782 eread <X>
2783 eread <X>
2783 ---------
2784 ---------
2784
2785
2785 ``read()`` N bytes from the server's stderr pipe, if available.
2786 ``read()`` N bytes from the server's stderr pipe, if available.
2786
2787
2787 Specifying Unified Frame-Based Protocol Frames
2788 Specifying Unified Frame-Based Protocol Frames
2788 ----------------------------------------------
2789 ----------------------------------------------
2789
2790
2790 It is possible to emit *Unified Frame-Based Protocol* frames by using special
2791 It is possible to emit *Unified Frame-Based Protocol* frames by using special
2791 syntax.
2792 syntax.
2792
2793
2793 A frame is composed of a type, flags, and a payload. These can be parsed
2794 A frame is composed of a type, flags, and a payload. These can be parsed
2794 from a string of the form:
2795 from a string of the form:
2795
2796
2796 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
2797 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
2797
2798
2798 ``request-id`` and ``stream-id`` are integers defining the request and
2799 ``request-id`` and ``stream-id`` are integers defining the request and
2799 stream identifiers.
2800 stream identifiers.
2800
2801
2801 ``type`` can be an integer value for the frame type or the string name
2802 ``type`` can be an integer value for the frame type or the string name
2802 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
2803 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
2803 ``command-name``.
2804 ``command-name``.
2804
2805
2805 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
2806 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
2806 components. Each component (and there can be just one) can be an integer
2807 components. Each component (and there can be just one) can be an integer
2807 or a flag name for stream flags or frame flags, respectively. Values are
2808 or a flag name for stream flags or frame flags, respectively. Values are
2808 resolved to integers and then bitwise OR'd together.
2809 resolved to integers and then bitwise OR'd together.
2809
2810
2810 ``payload`` represents the raw frame payload. If it begins with
2811 ``payload`` represents the raw frame payload. If it begins with
2811 ``cbor:``, the following string is evaluated as Python code and the
2812 ``cbor:``, the following string is evaluated as Python code and the
2812 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
2813 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
2813 as a Python byte string literal.
2814 as a Python byte string literal.
2814 """
2815 """
2815 opts = pycompat.byteskwargs(opts)
2816 opts = pycompat.byteskwargs(opts)
2816
2817
2817 if opts['localssh'] and not repo:
2818 if opts['localssh'] and not repo:
2818 raise error.Abort(_('--localssh requires a repository'))
2819 raise error.Abort(_('--localssh requires a repository'))
2819
2820
2820 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
2821 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
2821 raise error.Abort(_('invalid value for --peer'),
2822 raise error.Abort(_('invalid value for --peer'),
2822 hint=_('valid values are "raw", "http2", "ssh1", and "ssh2"'))
2823 hint=_('valid values are "raw", "http2", "ssh1", and "ssh2"'))
2823
2824
2824 if path and opts['localssh']:
2825 if path and opts['localssh']:
2825 raise error.Abort(_('cannot specify --localssh with an explicit '
2826 raise error.Abort(_('cannot specify --localssh with an explicit '
2826 'path'))
2827 'path'))
2827
2828
2828 if ui.interactive():
2829 if ui.interactive():
2829 ui.write(_('(waiting for commands on stdin)\n'))
2830 ui.write(_('(waiting for commands on stdin)\n'))
2830
2831
2831 blocks = list(_parsewirelangblocks(ui.fin))
2832 blocks = list(_parsewirelangblocks(ui.fin))
2832
2833
2833 proc = None
2834 proc = None
2834 stdin = None
2835 stdin = None
2835 stdout = None
2836 stdout = None
2836 stderr = None
2837 stderr = None
2837 opener = None
2838 opener = None
2838
2839
2839 if opts['localssh']:
2840 if opts['localssh']:
2840 # We start the SSH server in its own process so there is process
2841 # We start the SSH server in its own process so there is process
2841 # separation. This prevents a whole class of potential bugs around
2842 # separation. This prevents a whole class of potential bugs around
2842 # shared state from interfering with server operation.
2843 # shared state from interfering with server operation.
2843 args = procutil.hgcmd() + [
2844 args = procutil.hgcmd() + [
2844 '-R', repo.root,
2845 '-R', repo.root,
2845 'debugserve', '--sshstdio',
2846 'debugserve', '--sshstdio',
2846 ]
2847 ]
2847 proc = subprocess.Popen(args, stdin=subprocess.PIPE,
2848 proc = subprocess.Popen(args, stdin=subprocess.PIPE,
2848 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
2849 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
2849 bufsize=0)
2850 bufsize=0)
2850
2851
2851 stdin = proc.stdin
2852 stdin = proc.stdin
2852 stdout = proc.stdout
2853 stdout = proc.stdout
2853 stderr = proc.stderr
2854 stderr = proc.stderr
2854
2855
2855 # We turn the pipes into observers so we can log I/O.
2856 # We turn the pipes into observers so we can log I/O.
2856 if ui.verbose or opts['peer'] == 'raw':
2857 if ui.verbose or opts['peer'] == 'raw':
2857 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
2858 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
2858 logdata=True)
2859 logdata=True)
2859 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
2860 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
2860 logdata=True)
2861 logdata=True)
2861 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
2862 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
2862 logdata=True)
2863 logdata=True)
2863
2864
2864 # --localssh also implies the peer connection settings.
2865 # --localssh also implies the peer connection settings.
2865
2866
2866 url = 'ssh://localserver'
2867 url = 'ssh://localserver'
2867 autoreadstderr = not opts['noreadstderr']
2868 autoreadstderr = not opts['noreadstderr']
2868
2869
2869 if opts['peer'] == 'ssh1':
2870 if opts['peer'] == 'ssh1':
2870 ui.write(_('creating ssh peer for wire protocol version 1\n'))
2871 ui.write(_('creating ssh peer for wire protocol version 1\n'))
2871 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
2872 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
2872 None, autoreadstderr=autoreadstderr)
2873 None, autoreadstderr=autoreadstderr)
2873 elif opts['peer'] == 'ssh2':
2874 elif opts['peer'] == 'ssh2':
2874 ui.write(_('creating ssh peer for wire protocol version 2\n'))
2875 ui.write(_('creating ssh peer for wire protocol version 2\n'))
2875 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
2876 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
2876 None, autoreadstderr=autoreadstderr)
2877 None, autoreadstderr=autoreadstderr)
2877 elif opts['peer'] == 'raw':
2878 elif opts['peer'] == 'raw':
2878 ui.write(_('using raw connection to peer\n'))
2879 ui.write(_('using raw connection to peer\n'))
2879 peer = None
2880 peer = None
2880 else:
2881 else:
2881 ui.write(_('creating ssh peer from handshake results\n'))
2882 ui.write(_('creating ssh peer from handshake results\n'))
2882 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
2883 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
2883 autoreadstderr=autoreadstderr)
2884 autoreadstderr=autoreadstderr)
2884
2885
2885 elif path:
2886 elif path:
2886 # We bypass hg.peer() so we can proxy the sockets.
2887 # We bypass hg.peer() so we can proxy the sockets.
2887 # TODO consider not doing this because we skip
2888 # TODO consider not doing this because we skip
2888 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
2889 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
2889 u = util.url(path)
2890 u = util.url(path)
2890 if u.scheme != 'http':
2891 if u.scheme != 'http':
2891 raise error.Abort(_('only http:// paths are currently supported'))
2892 raise error.Abort(_('only http:// paths are currently supported'))
2892
2893
2893 url, authinfo = u.authinfo()
2894 url, authinfo = u.authinfo()
2894 openerargs = {
2895 openerargs = {
2895 r'useragent': b'Mercurial debugwireproto',
2896 r'useragent': b'Mercurial debugwireproto',
2896 }
2897 }
2897
2898
2898 # Turn pipes/sockets into observers so we can log I/O.
2899 # Turn pipes/sockets into observers so we can log I/O.
2899 if ui.verbose:
2900 if ui.verbose:
2900 openerargs.update({
2901 openerargs.update({
2901 r'loggingfh': ui,
2902 r'loggingfh': ui,
2902 r'loggingname': b's',
2903 r'loggingname': b's',
2903 r'loggingopts': {
2904 r'loggingopts': {
2904 r'logdata': True,
2905 r'logdata': True,
2905 r'logdataapis': False,
2906 r'logdataapis': False,
2906 },
2907 },
2907 })
2908 })
2908
2909
2909 if ui.debugflag:
2910 if ui.debugflag:
2910 openerargs[r'loggingopts'][r'logdataapis'] = True
2911 openerargs[r'loggingopts'][r'logdataapis'] = True
2911
2912
2912 # Don't send default headers when in raw mode. This allows us to
2913 # Don't send default headers when in raw mode. This allows us to
2913 # bypass most of the behavior of our URL handling code so we can
2914 # bypass most of the behavior of our URL handling code so we can
2914 # have near complete control over what's sent on the wire.
2915 # have near complete control over what's sent on the wire.
2915 if opts['peer'] == 'raw':
2916 if opts['peer'] == 'raw':
2916 openerargs[r'sendaccept'] = False
2917 openerargs[r'sendaccept'] = False
2917
2918
2918 opener = urlmod.opener(ui, authinfo, **openerargs)
2919 opener = urlmod.opener(ui, authinfo, **openerargs)
2919
2920
2920 if opts['peer'] == 'http2':
2921 if opts['peer'] == 'http2':
2921 ui.write(_('creating http peer for wire protocol version 2\n'))
2922 ui.write(_('creating http peer for wire protocol version 2\n'))
2922 # We go through makepeer() because we need an API descriptor for
2923 # We go through makepeer() because we need an API descriptor for
2923 # the peer instance to be useful.
2924 # the peer instance to be useful.
2924 with ui.configoverride({
2925 with ui.configoverride({
2925 ('experimental', 'httppeer.advertise-v2'): True}):
2926 ('experimental', 'httppeer.advertise-v2'): True}):
2926 if opts['nologhandshake']:
2927 if opts['nologhandshake']:
2927 ui.pushbuffer()
2928 ui.pushbuffer()
2928
2929
2929 peer = httppeer.makepeer(ui, path, opener=opener)
2930 peer = httppeer.makepeer(ui, path, opener=opener)
2930
2931
2931 if opts['nologhandshake']:
2932 if opts['nologhandshake']:
2932 ui.popbuffer()
2933 ui.popbuffer()
2933
2934
2934 if not isinstance(peer, httppeer.httpv2peer):
2935 if not isinstance(peer, httppeer.httpv2peer):
2935 raise error.Abort(_('could not instantiate HTTP peer for '
2936 raise error.Abort(_('could not instantiate HTTP peer for '
2936 'wire protocol version 2'),
2937 'wire protocol version 2'),
2937 hint=_('the server may not have the feature '
2938 hint=_('the server may not have the feature '
2938 'enabled or is not allowing this '
2939 'enabled or is not allowing this '
2939 'client version'))
2940 'client version'))
2940
2941
2941 elif opts['peer'] == 'raw':
2942 elif opts['peer'] == 'raw':
2942 ui.write(_('using raw connection to peer\n'))
2943 ui.write(_('using raw connection to peer\n'))
2943 peer = None
2944 peer = None
2944 elif opts['peer']:
2945 elif opts['peer']:
2945 raise error.Abort(_('--peer %s not supported with HTTP peers') %
2946 raise error.Abort(_('--peer %s not supported with HTTP peers') %
2946 opts['peer'])
2947 opts['peer'])
2947 else:
2948 else:
2948 peer = httppeer.makepeer(ui, path, opener=opener)
2949 peer = httppeer.makepeer(ui, path, opener=opener)
2949
2950
2950 # We /could/ populate stdin/stdout with sock.makefile()...
2951 # We /could/ populate stdin/stdout with sock.makefile()...
2951 else:
2952 else:
2952 raise error.Abort(_('unsupported connection configuration'))
2953 raise error.Abort(_('unsupported connection configuration'))
2953
2954
2954 batchedcommands = None
2955 batchedcommands = None
2955
2956
2956 # Now perform actions based on the parsed wire language instructions.
2957 # Now perform actions based on the parsed wire language instructions.
2957 for action, lines in blocks:
2958 for action, lines in blocks:
2958 if action in ('raw', 'raw+'):
2959 if action in ('raw', 'raw+'):
2959 if not stdin:
2960 if not stdin:
2960 raise error.Abort(_('cannot call raw/raw+ on this peer'))
2961 raise error.Abort(_('cannot call raw/raw+ on this peer'))
2961
2962
2962 # Concatenate the data together.
2963 # Concatenate the data together.
2963 data = ''.join(l.lstrip() for l in lines)
2964 data = ''.join(l.lstrip() for l in lines)
2964 data = stringutil.unescapestr(data)
2965 data = stringutil.unescapestr(data)
2965 stdin.write(data)
2966 stdin.write(data)
2966
2967
2967 if action == 'raw+':
2968 if action == 'raw+':
2968 stdin.flush()
2969 stdin.flush()
2969 elif action == 'flush':
2970 elif action == 'flush':
2970 if not stdin:
2971 if not stdin:
2971 raise error.Abort(_('cannot call flush on this peer'))
2972 raise error.Abort(_('cannot call flush on this peer'))
2972 stdin.flush()
2973 stdin.flush()
2973 elif action.startswith('command'):
2974 elif action.startswith('command'):
2974 if not peer:
2975 if not peer:
2975 raise error.Abort(_('cannot send commands unless peer instance '
2976 raise error.Abort(_('cannot send commands unless peer instance '
2976 'is available'))
2977 'is available'))
2977
2978
2978 command = action.split(' ', 1)[1]
2979 command = action.split(' ', 1)[1]
2979
2980
2980 args = {}
2981 args = {}
2981 for line in lines:
2982 for line in lines:
2982 # We need to allow empty values.
2983 # We need to allow empty values.
2983 fields = line.lstrip().split(' ', 1)
2984 fields = line.lstrip().split(' ', 1)
2984 if len(fields) == 1:
2985 if len(fields) == 1:
2985 key = fields[0]
2986 key = fields[0]
2986 value = ''
2987 value = ''
2987 else:
2988 else:
2988 key, value = fields
2989 key, value = fields
2989
2990
2990 if value.startswith('eval:'):
2991 if value.startswith('eval:'):
2991 value = stringutil.evalpythonliteral(value[5:])
2992 value = stringutil.evalpythonliteral(value[5:])
2992 else:
2993 else:
2993 value = stringutil.unescapestr(value)
2994 value = stringutil.unescapestr(value)
2994
2995
2995 args[key] = value
2996 args[key] = value
2996
2997
2997 if batchedcommands is not None:
2998 if batchedcommands is not None:
2998 batchedcommands.append((command, args))
2999 batchedcommands.append((command, args))
2999 continue
3000 continue
3000
3001
3001 ui.status(_('sending %s command\n') % command)
3002 ui.status(_('sending %s command\n') % command)
3002
3003
3003 if 'PUSHFILE' in args:
3004 if 'PUSHFILE' in args:
3004 with open(args['PUSHFILE'], r'rb') as fh:
3005 with open(args['PUSHFILE'], r'rb') as fh:
3005 del args['PUSHFILE']
3006 del args['PUSHFILE']
3006 res, output = peer._callpush(command, fh,
3007 res, output = peer._callpush(command, fh,
3007 **pycompat.strkwargs(args))
3008 **pycompat.strkwargs(args))
3008 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3009 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3009 ui.status(_('remote output: %s\n') %
3010 ui.status(_('remote output: %s\n') %
3010 stringutil.escapestr(output))
3011 stringutil.escapestr(output))
3011 else:
3012 else:
3012 with peer.commandexecutor() as e:
3013 with peer.commandexecutor() as e:
3013 res = e.callcommand(command, args).result()
3014 res = e.callcommand(command, args).result()
3014
3015
3015 ui.status(_('response: %s\n') % stringutil.pprint(res))
3016 if isinstance(res, wireprotov2peer.commandresponse):
3017 if res.cbor:
3018 val = list(res.cborobjects())
3019 else:
3020 val = [res.b.getvalue()]
3021
3022 ui.status(_('response: %s\n') % stringutil.pprint(val))
3023
3024 else:
3025 ui.status(_('response: %s\n') % stringutil.pprint(res))
3016
3026
3017 elif action == 'batchbegin':
3027 elif action == 'batchbegin':
3018 if batchedcommands is not None:
3028 if batchedcommands is not None:
3019 raise error.Abort(_('nested batchbegin not allowed'))
3029 raise error.Abort(_('nested batchbegin not allowed'))
3020
3030
3021 batchedcommands = []
3031 batchedcommands = []
3022 elif action == 'batchsubmit':
3032 elif action == 'batchsubmit':
3023 # There is a batching API we could go through. But it would be
3033 # There is a batching API we could go through. But it would be
3024 # difficult to normalize requests into function calls. It is easier
3034 # difficult to normalize requests into function calls. It is easier
3025 # to bypass this layer and normalize to commands + args.
3035 # to bypass this layer and normalize to commands + args.
3026 ui.status(_('sending batch with %d sub-commands\n') %
3036 ui.status(_('sending batch with %d sub-commands\n') %
3027 len(batchedcommands))
3037 len(batchedcommands))
3028 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3038 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3029 ui.status(_('response #%d: %s\n') %
3039 ui.status(_('response #%d: %s\n') %
3030 (i, stringutil.escapestr(chunk)))
3040 (i, stringutil.escapestr(chunk)))
3031
3041
3032 batchedcommands = None
3042 batchedcommands = None
3033
3043
3034 elif action.startswith('httprequest '):
3044 elif action.startswith('httprequest '):
3035 if not opener:
3045 if not opener:
3036 raise error.Abort(_('cannot use httprequest without an HTTP '
3046 raise error.Abort(_('cannot use httprequest without an HTTP '
3037 'peer'))
3047 'peer'))
3038
3048
3039 request = action.split(' ', 2)
3049 request = action.split(' ', 2)
3040 if len(request) != 3:
3050 if len(request) != 3:
3041 raise error.Abort(_('invalid httprequest: expected format is '
3051 raise error.Abort(_('invalid httprequest: expected format is '
3042 '"httprequest <method> <path>'))
3052 '"httprequest <method> <path>'))
3043
3053
3044 method, httppath = request[1:]
3054 method, httppath = request[1:]
3045 headers = {}
3055 headers = {}
3046 body = None
3056 body = None
3047 frames = []
3057 frames = []
3048 for line in lines:
3058 for line in lines:
3049 line = line.lstrip()
3059 line = line.lstrip()
3050 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3060 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3051 if m:
3061 if m:
3052 headers[m.group(1)] = m.group(2)
3062 headers[m.group(1)] = m.group(2)
3053 continue
3063 continue
3054
3064
3055 if line.startswith(b'BODYFILE '):
3065 if line.startswith(b'BODYFILE '):
3056 with open(line.split(b' ', 1)[1], 'rb') as fh:
3066 with open(line.split(b' ', 1)[1], 'rb') as fh:
3057 body = fh.read()
3067 body = fh.read()
3058 elif line.startswith(b'frame '):
3068 elif line.startswith(b'frame '):
3059 frame = wireprotoframing.makeframefromhumanstring(
3069 frame = wireprotoframing.makeframefromhumanstring(
3060 line[len(b'frame '):])
3070 line[len(b'frame '):])
3061
3071
3062 frames.append(frame)
3072 frames.append(frame)
3063 else:
3073 else:
3064 raise error.Abort(_('unknown argument to httprequest: %s') %
3074 raise error.Abort(_('unknown argument to httprequest: %s') %
3065 line)
3075 line)
3066
3076
3067 url = path + httppath
3077 url = path + httppath
3068
3078
3069 if frames:
3079 if frames:
3070 body = b''.join(bytes(f) for f in frames)
3080 body = b''.join(bytes(f) for f in frames)
3071
3081
3072 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3082 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3073
3083
3074 # urllib.Request insists on using has_data() as a proxy for
3084 # urllib.Request insists on using has_data() as a proxy for
3075 # determining the request method. Override that to use our
3085 # determining the request method. Override that to use our
3076 # explicitly requested method.
3086 # explicitly requested method.
3077 req.get_method = lambda: method
3087 req.get_method = lambda: method
3078
3088
3079 try:
3089 try:
3080 res = opener.open(req)
3090 res = opener.open(req)
3081 body = res.read()
3091 body = res.read()
3082 except util.urlerr.urlerror as e:
3092 except util.urlerr.urlerror as e:
3083 e.read()
3093 e.read()
3084 continue
3094 continue
3085
3095
3086 if res.headers.get('Content-Type') == 'application/mercurial-cbor':
3096 if res.headers.get('Content-Type') == 'application/mercurial-cbor':
3087 ui.write(_('cbor> %s\n') % stringutil.pprint(cbor.loads(body)))
3097 ui.write(_('cbor> %s\n') % stringutil.pprint(cbor.loads(body)))
3088
3098
3089 elif action == 'close':
3099 elif action == 'close':
3090 peer.close()
3100 peer.close()
3091 elif action == 'readavailable':
3101 elif action == 'readavailable':
3092 if not stdout or not stderr:
3102 if not stdout or not stderr:
3093 raise error.Abort(_('readavailable not available on this peer'))
3103 raise error.Abort(_('readavailable not available on this peer'))
3094
3104
3095 stdin.close()
3105 stdin.close()
3096 stdout.read()
3106 stdout.read()
3097 stderr.read()
3107 stderr.read()
3098
3108
3099 elif action == 'readline':
3109 elif action == 'readline':
3100 if not stdout:
3110 if not stdout:
3101 raise error.Abort(_('readline not available on this peer'))
3111 raise error.Abort(_('readline not available on this peer'))
3102 stdout.readline()
3112 stdout.readline()
3103 elif action == 'ereadline':
3113 elif action == 'ereadline':
3104 if not stderr:
3114 if not stderr:
3105 raise error.Abort(_('ereadline not available on this peer'))
3115 raise error.Abort(_('ereadline not available on this peer'))
3106 stderr.readline()
3116 stderr.readline()
3107 elif action.startswith('read '):
3117 elif action.startswith('read '):
3108 count = int(action.split(' ', 1)[1])
3118 count = int(action.split(' ', 1)[1])
3109 if not stdout:
3119 if not stdout:
3110 raise error.Abort(_('read not available on this peer'))
3120 raise error.Abort(_('read not available on this peer'))
3111 stdout.read(count)
3121 stdout.read(count)
3112 elif action.startswith('eread '):
3122 elif action.startswith('eread '):
3113 count = int(action.split(' ', 1)[1])
3123 count = int(action.split(' ', 1)[1])
3114 if not stderr:
3124 if not stderr:
3115 raise error.Abort(_('eread not available on this peer'))
3125 raise error.Abort(_('eread not available on this peer'))
3116 stderr.read(count)
3126 stderr.read(count)
3117 else:
3127 else:
3118 raise error.Abort(_('unknown action: %s') % action)
3128 raise error.Abort(_('unknown action: %s') % action)
3119
3129
3120 if batchedcommands is not None:
3130 if batchedcommands is not None:
3121 raise error.Abort(_('unclosed "batchbegin" request'))
3131 raise error.Abort(_('unclosed "batchbegin" request'))
3122
3132
3123 if peer:
3133 if peer:
3124 peer.close()
3134 peer.close()
3125
3135
3126 if proc:
3136 if proc:
3127 proc.kill()
3137 proc.kill()
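To make the stdin mini-language described in the docstring above concrete,
here is an illustrative script that could be piped to ``hg debugwireproto``.
The ``listkeys``/``namespace bookmarks`` pair mirrors the example from the
docstring; the ``batchbegin``/``batchsubmit`` wrapper shows how ``command``
blocks are queued and then submitted as one batch request. The particular
commands and namespaces are examples only::

    # issue a single command
    command listkeys
        namespace bookmarks

    # queue two commands and submit them together as one batch request
    batchbegin
    command heads
    command listkeys
        namespace phases
    batchsubmit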
@@ -1,135 +1,137 b''
1 # wireprotov2peer.py - client side code for wire protocol version 2
1 # wireprotov2peer.py - client side code for wire protocol version 2
2 #
2 #
3 # Copyright 2018 Gregory Szorc <gregory.szorc@gmail.com>
3 # Copyright 2018 Gregory Szorc <gregory.szorc@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 from .i18n import _
10 from .i18n import _
11 from .thirdparty import (
11 from .thirdparty import (
12 cbor,
12 cbor,
13 )
13 )
14 from . import (
14 from . import (
15 error,
15 error,
16 util,
16 util,
17 wireprotoframing,
17 wireprotoframing,
18 )
18 )
19
19
20 class commandresponse(object):
21 """Represents the response to a command request."""
22
23 def __init__(self, requestid, command):
24 self.requestid = requestid
25 self.command = command
26
27 self.cbor = False
28 self.b = util.bytesio()
29
30 def cborobjects(self):
31 """Obtain decoded CBOR objects from this response."""
32 size = self.b.tell()
33 self.b.seek(0)
34
35 decoder = cbor.CBORDecoder(self.b)
36
37 while self.b.tell() < size:
38 yield decoder.decode()
39
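The decoding that previously happened inline in the client handler now lives
behind ``commandresponse``. A minimal sketch of how a caller is expected to
consume the new type, mirroring the updated ``command`` handling in
``debugcommands.py`` above (the ``heads`` command and the ``peer`` variable
are illustrative assumptions, not part of this change)::

    # Sketch only: assumes an established wire protocol version 2 peer.
    with peer.commandexecutor() as e:
        res = e.callcommand(b'heads', {}).result()

    # Version 2 peers now resolve the future with a commandresponse
    # rather than a pre-decoded value.
    if res.cbor:
        # Decode every CBOR object accumulated in the response buffer.
        values = list(res.cborobjects())
    else:
        # Fall back to the raw buffered bytes.
        values = [res.b.getvalue()]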
20 class clienthandler(object):
40 class clienthandler(object):
21 """Object to handle higher-level client activities.
41 """Object to handle higher-level client activities.
22
42
23 The ``clientreactor`` is used to hold low-level state about the frame-based
43 The ``clientreactor`` is used to hold low-level state about the frame-based
24 protocol, such as which requests and streams are active. This type is used
44 protocol, such as which requests and streams are active. This type is used
25 for higher-level operations, such as reading frames from a socket, exposing
45 for higher-level operations, such as reading frames from a socket, exposing
26 and managing a higher-level primitive for representing command responses,
46 and managing a higher-level primitive for representing command responses,
27 etc. This class is what peers should probably use to bridge wire activity
47 etc. This class is what peers should probably use to bridge wire activity
28 with the higher-level peer API.
48 with the higher-level peer API.
29 """
49 """
30
50
31 def __init__(self, ui, clientreactor):
51 def __init__(self, ui, clientreactor):
32 self._ui = ui
52 self._ui = ui
33 self._reactor = clientreactor
53 self._reactor = clientreactor
34 self._requests = {}
54 self._requests = {}
35 self._futures = {}
55 self._futures = {}
36 self._responses = {}
56 self._responses = {}
37
57
38 def callcommand(self, command, args, f):
58 def callcommand(self, command, args, f):
39 """Register a request to call a command.
59 """Register a request to call a command.
40
60
41 Returns an iterable of frames that should be sent over the wire.
61 Returns an iterable of frames that should be sent over the wire.
42 """
62 """
43 request, action, meta = self._reactor.callcommand(command, args)
63 request, action, meta = self._reactor.callcommand(command, args)
44
64
45 if action != 'noop':
65 if action != 'noop':
46 raise error.ProgrammingError('%s not yet supported' % action)
66 raise error.ProgrammingError('%s not yet supported' % action)
47
67
48 rid = request.requestid
68 rid = request.requestid
49 self._requests[rid] = request
69 self._requests[rid] = request
50 self._futures[rid] = f
70 self._futures[rid] = f
51 self._responses[rid] = {
71 self._responses[rid] = commandresponse(rid, command)
52 'cbor': False,
53 'b': util.bytesio(),
54 }
55
72
56 return iter(())
73 return iter(())
57
74
58 def flushcommands(self):
75 def flushcommands(self):
59 """Flush all queued commands.
76 """Flush all queued commands.
60
77
61 Returns an iterable of frames that should be sent over the wire.
78 Returns an iterable of frames that should be sent over the wire.
62 """
79 """
63 action, meta = self._reactor.flushcommands()
80 action, meta = self._reactor.flushcommands()
64
81
65 if action != 'sendframes':
82 if action != 'sendframes':
66 raise error.ProgrammingError('%s not yet supported' % action)
83 raise error.ProgrammingError('%s not yet supported' % action)
67
84
68 return meta['framegen']
85 return meta['framegen']
69
86
70 def readframe(self, fh):
87 def readframe(self, fh):
71 """Attempt to read and process a frame.
88 """Attempt to read and process a frame.
72
89
73 Returns None if no frame was read. Presumably this means EOF.
90 Returns None if no frame was read. Presumably this means EOF.
74 """
91 """
75 frame = wireprotoframing.readframe(fh)
92 frame = wireprotoframing.readframe(fh)
76 if frame is None:
93 if frame is None:
77 # TODO tell reactor?
94 # TODO tell reactor?
78 return
95 return
79
96
80 self._ui.note(_('received %r\n') % frame)
97 self._ui.note(_('received %r\n') % frame)
81 self._processframe(frame)
98 self._processframe(frame)
82
99
83 return True
100 return True
84
101
85 def _processframe(self, frame):
102 def _processframe(self, frame):
86 """Process a single read frame."""
103 """Process a single read frame."""
87
104
88 action, meta = self._reactor.onframerecv(frame)
105 action, meta = self._reactor.onframerecv(frame)
89
106
90 if action == 'error':
107 if action == 'error':
91 e = error.RepoError(meta['message'])
108 e = error.RepoError(meta['message'])
92
109
93 if frame.requestid in self._futures:
110 if frame.requestid in self._futures:
94 self._futures[frame.requestid].set_exception(e)
111 self._futures[frame.requestid].set_exception(e)
95 else:
112 else:
96 raise e
113 raise e
97
114
98 if frame.requestid not in self._requests:
115 if frame.requestid not in self._requests:
99 raise error.ProgrammingError(
116 raise error.ProgrammingError(
100 'received frame for unknown request; this is either a bug in '
117 'received frame for unknown request; this is either a bug in '
101 'the clientreactor not screening for this or this instance was '
118 'the clientreactor not screening for this or this instance was '
102 'never told about this request: %r' % frame)
119 'never told about this request: %r' % frame)
103
120
104 response = self._responses[frame.requestid]
121 response = self._responses[frame.requestid]
105
122
106 if action == 'responsedata':
123 if action == 'responsedata':
107 response['b'].write(meta['data'])
124 response.b.write(meta['data'])
108
125
109 if meta['cbor']:
126 if meta['cbor']:
110 response['cbor'] = True
127 response.cbor = True
111
128
112 if meta['eos']:
129 if meta['eos']:
113 if meta['cbor']:
130 self._futures[frame.requestid].set_result(response)
114 # If CBOR, decode every object.
115 b = response['b']
116
117 size = b.tell()
118 b.seek(0)
119
120 decoder = cbor.CBORDecoder(b)
121
122 result = []
123 while b.tell() < size:
124 result.append(decoder.decode())
125 else:
126 result = [response['b'].getvalue()]
127
128 self._futures[frame.requestid].set_result(result)
129
131
130 del self._requests[frame.requestid]
132 del self._requests[frame.requestid]
131 del self._futures[frame.requestid]
133 del self._futures[frame.requestid]
132
134
133 else:
135 else:
134 raise error.ProgrammingError(
136 raise error.ProgrammingError(
135 'unhandled action from clientreactor: %s' % action)
137 'unhandled action from clientreactor: %s' % action)
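Putting the pieces in this file together: ``callcommand`` registers a
``commandresponse`` keyed by request id, ``_processframe`` accumulates
response data into ``response.b`` and records whether it is CBOR, and at
end-of-stream the future resolves with the ``commandresponse`` itself,
deferring decoding to the caller. A schematic driver loop, in which ``ui``,
``fh`` (a connected file-like object) and ``send`` (the transport write path)
are assumed to exist and the reactor is constructed with its defaults::

    import concurrent.futures

    from mercurial import wireprotoframing, wireprotov2peer

    reactor = wireprotoframing.clientreactor()
    handler = wireprotov2peer.clienthandler(ui, reactor)
    f = concurrent.futures.Future()

    # Register the command; with buffered sends this is a 'noop' action,
    # so no frames are produced yet.
    list(handler.callcommand(b'heads', {}, f))

    # Flush the queued command and put the resulting frames on the wire.
    for frame in handler.flushcommands():
        send(frame)

    # Read frames until EOF; readframe() returns None when the stream ends.
    while handler.readframe(fh):
        pass

    res = f.result()                      # a commandresponse instance
    objs = list(res.cborobjects()) if res.cbor else [res.b.getvalue()]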