debugcommands: work around logiofd being a pipe and unseekable...
Augie Fackler
r38333:275cc461 default
@@ -1,3138 +1,3144 @@
# debugcommands.py - command processing for debug* commands
#
# Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import codecs
import collections
import difflib
import errno
import operator
import os
import random
import re
import socket
import ssl
import stat
import string
import subprocess
import sys
import time

from .i18n import _
from .node import (
    bin,
    hex,
    nullhex,
    nullid,
    nullrev,
    short,
)
from .thirdparty import (
    cbor,
)
from . import (
    bundle2,
    changegroup,
    cmdutil,
    color,
    context,
    dagparser,
    dagutil,
    encoding,
    error,
    exchange,
    extensions,
    filemerge,
    fileset,
    formatter,
    hg,
    httppeer,
    localrepo,
    lock as lockmod,
    logcmdutil,
    merge as mergemod,
    obsolete,
    obsutil,
    phases,
    policy,
    pvec,
    pycompat,
    registrar,
    repair,
    revlog,
    revset,
    revsetlang,
    scmutil,
    setdiscovery,
    simplemerge,
    sshpeer,
    sslutil,
    streamclone,
    templater,
    treediscovery,
    upgrade,
    url as urlmod,
    util,
    vfs as vfsmod,
    wireprotoframing,
    wireprotoserver,
    wireprotov2peer,
)
from .utils import (
    dateutil,
    procutil,
    stringutil,
)

release = lockmod.release

command = registrar.command()

@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
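    # Illustrative usage (not part of the original file): with a repository
    # checked out, something like 'hg debugancestor REV1 REV2' prints the
    # common ancestor as '<rev>:<hex node>'; passing an INDEX file first
    # resolves the revisions against that revlog instead of the changelog.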
    if len(args) == 3:
        index, rev1, rev2 = args
        r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
        lookup = r.lookup
    elif len(args) == 2:
        if not repo:
            raise error.Abort(_('there is no Mercurial repository here '
                                '(.hg not found)'))
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_('either two or three arguments required'))
    a = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write('%d:%s\n' % (r.rev(a), hex(a)))

@command('debugapplystreamclonebundle', [], 'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    f = hg.openpath(ui, fname)
    gen = exchange.readbundle(ui, f, fname)
    gen.apply(repo)

@command('debugbuilddag',
    [('m', 'mergeable-file', None, _('add single file mergeable changes')),
     ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
     ('n', 'new-file', None, _('add new file at each rev'))],
    _('[OPTION]... [TEXT]'))
def debugbuilddag(ui, repo, text=None,
                  mergeable_file=False,
                  overwritten_file=False,
                  new_file=False):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string-valued elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """
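    # Illustrative example (not part of the original file), using the element
    # syntax described above: running 'hg debugbuilddag "+2 :x $ +3 /x"' in an
    # empty repo creates two linear nodes (tagging the second one "x"), resets
    # the default parent to null, adds three more nodes, and finally merges
    # the last of them with the node tagged "x".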

    if text is None:
        ui.status(_("reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_('repository is not empty'))

    # determine number of revs in DAG
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == 'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = ['%d' % i for i in xrange(0, total * linesperrev)]
        initialmergedlines.append("")

    tags = []

    wlock = lock = tr = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        tr = repo.transaction("builddag")

        at = -1
        atbranch = 'default'
        nodeids = []
        id = 0
        ui.progress(_('building'), id, unit=_('revisions'), total=total)
        for type, data in dagparser.parsedag(text):
            if type == 'n':
                ui.note(('node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = "mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [x[fn].data() for x in (pa, p1,
                                                                     p2)]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append("")
                    elif at > 0:
                        ml = p1[fn].data().split("\n")
                    else:
                        ml = initialmergedlines
                    ml[id * linesperrev] += " r%i" % id
                    mergedtext = "\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = "of"
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id

                if new_file:
                    fn = "nf%i" % id
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith("nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(repo, cx, path,
                                                  filecontent[path])
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
                                    date=(id, 0),
                                    user="debugbuilddag",
                                    extra={'branch': atbranch})
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == 'l':
                id, name = data
                ui.note(('tag %s\n' % name))
                tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == 'a':
                ui.note(('branch %s\n' % data))
                atbranch = data
            ui.progress(_('building'), id, unit=_('revisions'), total=total)
        tr.close()

        if tags:
            repo.vfs.write("localtags", "".join(tags))
    finally:
        ui.progress(_('building'), None)
        release(tr, lock, wlock)

def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    indent_string = ' ' * indent
    if all:
        ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
                 % indent_string)

        def showchunks(named):
            ui.write("\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write("%s%s %s %s %s %s %d\n" %
                         (indent_string, hex(node), hex(p1), hex(p2),
                          hex(cs), hex(deltabase), len(delta)))

        chunkdata = gen.changelogheader()
        showchunks("changelog")
        chunkdata = gen.manifestheader()
        showchunks("manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata['filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_('use debugbundle2 for this file'))
        chunkdata = gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write("%s%s\n" % (indent_string, hex(node)))

def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = ' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        msg = "%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = "%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        fm = ui.formatter('debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()

def _debugphaseheads(ui, data, indent=0):
    """display phase heads contained in 'data'"""
    indent_string = ' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(indent_string)
            ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))

def _quasirepr(thing):
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        return '{%s}' % (
            b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
    return pycompat.bytestr(repr(thing))

def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_('not a bundle2 file'))
    ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
    parttypes = opts.get(r'part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = '%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        if part.type == 'changegroup':
            version = part.params.get('version', '01')
            cg = changegroup.getunbundler(version, part, 'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == 'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == 'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)

@command('debugbundle',
    [('a', 'all', None, _('show all details')),
     ('', 'part-type', [], _('show only the named part type')),
     ('', 'spec', None, _('print the bundlespec of the bundle'))],
    _('FILE'),
    norepo=True)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
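    # Illustrative usage (not part of the original file): 'hg debugbundle --all
    # some-bundle.hg' lists every delta in the bundle, while '--spec' prints
    # only its bundlespec (the bundle file name here is hypothetical).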
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            spec = exchange.getbundlespec(ui, f)
            ui.write('%s\n' % spec)
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)

@command('debugcapabilities',
    [], _('PATH'),
    norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
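    # Illustrative usage (not part of the original file): point the command at
    # any reachable peer, e.g. 'hg debugcapabilities https://example.com/repo',
    # to print its main and bundle2 capabilities as formatted below.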
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    caps = peer.capabilities()
    ui.write(('Main capabilities:\n'))
    for c in sorted(caps):
        ui.write(('  %s\n') % c)
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.write(('Bundle2 capabilities:\n'))
        for key, values in sorted(b2caps.iteritems()):
            ui.write(('  %s\n') % key)
            for v in values:
                ui.write(('    %s\n') % v)

@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        error = _(".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(error)

@command('debugcolor',
    [('', 'style', None, _('show all configured styles'))],
    'hg debugcolor')
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.write(('color mode: %s\n') % ui._colormode)
    if opts.get(r'style'):
        return _debugdisplaystyle(ui)
    else:
        return _debugdisplaycolor(ui)

def _debugdisplaycolor(ui):
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for k, v in ui.configitems('color'):
            if k.startswith('color.'):
                ui._styles[k] = k[6:]
            elif k.startswith('terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_('available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(),
                   key=lambda i: ('_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(('%s\n') % colorname, label=label)

def _debugdisplaystyle(ui):
    ui.write(_('available style:\n'))
    if not ui._styles:
        return
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write('%s' % label, label=label)
        if effects:
            # 50
            ui.write(': ')
            ui.write(' ' * (max(0, width - len(label))))
            ui.write(', '.join(ui.label(e, e) for e in effects.split()))
        ui.write('\n')

@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(_('(warning: stream clone bundle will contain secret '
                  'revisions)\n'))

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))

@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
     ('b', 'branches', None, _('annotate with branch names')),
     ('', 'dots', None, _('use dots for runs')),
     ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
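    # Illustrative usage (not part of the original file): 'hg debugdag -t -b'
    # emits the current changelog DAG with tag labels and branch annotations,
    # using the same element syntax that 'hg debugbuilddag' accepts.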
    spaces = opts.get(r'spaces')
    dots = opts.get(r'dots')
    if file_:
        rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                             file_)
        revs = set((int(r) for r in revs))
        def events():
            for r in rlog:
                yield 'n', (r, list(p for p in rlog.parentrevs(r)
                                    if p != -1))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        cl = repo.changelog
        tags = opts.get(r'tags')
        branches = opts.get(r'branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            b = "default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(p for p in cl.parentrevs(r)
                                    if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
        ui.write("\n")

@command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
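    # Illustrative usage (not part of the original file): 'hg debugdata -c 0'
    # dumps the raw stored text of changelog revision 0, and
    # 'hg debugdata FILE REV' does the same for a tracked file's filelog.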
    opts = pycompat.byteskwargs(opts)
    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
    try:
        ui.write(r.revision(r.lookup(rev), raw=True))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)

@command('debugdate',
    [('e', 'extended', None, _('try extended date formats'))],
    _('[-e] DATE [RANGE]'),
    norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
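    # Illustrative usage (not part of the original file): 'hg debugdate
    # "2006-06-06"' prints the parsed value as "internal: <unixtime> <offset>"
    # plus a "standard:" string; '-e' also tries the extended date formats.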
    if opts[r"extended"]:
        d = dateutil.parsedate(date, util.extendeddateformats)
    else:
        d = dateutil.parsedate(date)
    ui.write(("internal: %d %d\n") % d)
    ui.write(("standard: %s\n") % dateutil.datestr(d))
    if range:
        m = dateutil.matchdate(range)
        ui.write(("match: %s\n") % m(d[0]))

@command('debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``:        revision number
    :``chainid``:    delta chain identifier (numbered by unique base)
    :``chainlen``:   delta chain length to this revision
    :``prevrev``:    previous revision in delta chain
    :``deltatype``:  role of delta / how it was computed
    :``compsize``:   compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``:  total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``:    linear distance from base revision in delta chain to end
                     of this revision
    :``extradist``:  total size of revisions not part of this delta chain from
                     base of delta chain to end of this revision; a measurement
                     of how much extra data we need to read/seek across to read
                     the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use sparse reads, additional keywords
    are available:

    :``readsize``:     total size of data read from the disk for a revision
                       (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``:  density of useful bytes in the data read from the disk
    :``srchunks``:     in how many data hunks the whole revision would be read

    Sparse reads can be enabled with experimental.sparse-read = True
    """
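    # Illustrative usage (not part of the original file): the keywords above
    # can be combined with the formatter options, e.g.
    #   hg debugdeltachain -m -T '{rev} {chainid} {chainlen} {chainratio}\n'
    # to dump per-revision delta chain statistics for the manifest revlog.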
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            if e[3] == e[5]:
                deltatype = 'p1'
            elif e[3] == e[6]:
                deltatype = 'p2'
            elif e[3] == rev - 1:
                deltatype = 'prev'
            elif e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            if e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    fm.plain('    rev  chain# chainlen     prev   delta       '
             'size    rawsize  chainsize     ratio   lindist extradist '
             'extraratio')
    if withsparseread:
        fm.plain('   readsize largestblk rddensity srchunks')
    fm.plain('\n')

    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        chainratio = float(chainsize) / float(uncomp)
        extraratio = float(extradist) / float(chainsize)

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in revlog._slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            readdensity = float(chainsize) / float(readsize)

            fm.write('readsize largestblock readdensity srchunks',
                     ' %10d %10d %9.5f %8d',
                     readsize, largestblock, readdensity, srchunks,
                     readsize=readsize, largestblock=largestblock,
                     readdensity=readdensity, srchunks=srchunks)

        fm.plain('\n')

    fm.end()

@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime')),
     ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = opts.get(r'nodates')
    datesort = opts.get(r'datesort')

    timestr = ""
    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        if ent[3] == -1:
            timestr = 'unset               '
        elif nodates:
            timestr = 'set                 '
        else:
            timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
                                    time.localtime(ent[3]))
            timestr = encoding.strtolocal(timestr)
        if ent[1] & 0o20000:
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))

@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
     ('', 'nonheads', None,
      _('use old-style discovery with non-heads included')),
     ('', 'rev', [], 'restrict discovery to this set of revs'),
    ] + cmdutil.remoteopts,
    _('[--rev REV] [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
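    # Illustrative usage (not part of the original file): 'hg debugdiscovery
    # default' runs discovery against the default path; '--rev REV' restricts
    # the local side and '--old' exercises the legacy tree-based algorithm.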
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(pushedrevs, remoteheads, remote=remote):
        if opts.get('old'):
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))
                dag = dagutil.revlogdag(repo.changelog)
                all = dag.ancestorset(dag.internalizeall(common))
                common = dag.externalizeall(dag.headsetofconnecteds(all))
        else:
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
                                                            ancestorsof=nodes)
        common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
        if lheads <= common:
            ui.write(("local is subset\n"))
        elif rheads <= common:
            ui.write(("remote is subset\n"))

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts['rev']
    doit(localrevs, remoterevs)

_chunksize = 4 << 10

@command('debugdownload',
    [
        ('o', 'output', '', _('path')),
    ],
    optionalrepo=True)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config
    """
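    # Illustrative usage (not part of the original file): 'hg debugdownload -o
    # local-copy https://example.com/resource' streams the resource to the
    # file "local-copy" in _chunksize blocks; without -o it is written to the
    # ui output stream.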
    fh = urlmod.open(ui, url, output)

    dest = ui
    if output:
        dest = open(output, "wb", _chunksize)
    try:
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        if output:
            dest.close()

@command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter('debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = pycompat.fsencode(extmod.__file__)
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', '').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write('name', '%s\n', extname)
        else:
            fm.write('name', '%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain('\n')
            elif not exttestedwith:
                fm.plain(_(' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(' (%s!)\n' % lasttestedversion)

        fm.condwrite(ui.verbose and extsource, 'source',
                     _('  location: %s\n'), extsource or "")

        if ui.verbose:
            fm.plain(_('  bundled: %s\n') % ['no', 'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
                     _('  tested with: %s\n'),
                     fm.formatlist(exttestedwith, name='ver'))

        fm.condwrite(ui.verbose and extbuglink, 'buglink',
                     _('  bug reporting: %s\n'), extbuglink or "")

    fm.end()

@command('debugfileset',
    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
    _('[-r REV] FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
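    # Illustrative usage (not part of the original file): 'hg debugfileset -r .
    # "modified()"' lists the files matched by the fileset in the working
    # parent; with --verbose the parsed fileset tree is printed first.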
890 ctx = scmutil.revsingle(repo, opts.get(r'rev'), None)
890 ctx = scmutil.revsingle(repo, opts.get(r'rev'), None)
891 if ui.verbose:
891 if ui.verbose:
892 tree = fileset.parse(expr)
892 tree = fileset.parse(expr)
893 ui.note(fileset.prettyformat(tree), "\n")
893 ui.note(fileset.prettyformat(tree), "\n")
894
894
895 for f in ctx.getfileset(expr):
895 for f in ctx.getfileset(expr):
896 ui.write("%s\n" % f)
896 ui.write("%s\n" % f)
897
897
898 @command('debugformat',
898 @command('debugformat',
899 [] + cmdutil.formatteropts,
899 [] + cmdutil.formatteropts,
900 _(''))
900 _(''))
901 def debugformat(ui, repo, **opts):
901 def debugformat(ui, repo, **opts):
902 """display format information about the current repository
902 """display format information about the current repository
903
903
904 Use --verbose to get extra information about current config values and
904 Use --verbose to get extra information about current config values and
905 Mercurial defaults."""
905 Mercurial defaults."""
906 opts = pycompat.byteskwargs(opts)
906 opts = pycompat.byteskwargs(opts)
907 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
907 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
908 maxvariantlength = max(len('format-variant'), maxvariantlength)
908 maxvariantlength = max(len('format-variant'), maxvariantlength)
909
909
910 def makeformatname(name):
910 def makeformatname(name):
911 return '%s:' + (' ' * (maxvariantlength - len(name)))
911 return '%s:' + (' ' * (maxvariantlength - len(name)))
912
912
913 fm = ui.formatter('debugformat', opts)
913 fm = ui.formatter('debugformat', opts)
914 if fm.isplain():
914 if fm.isplain():
915 def formatvalue(value):
915 def formatvalue(value):
916 if util.safehasattr(value, 'startswith'):
916 if util.safehasattr(value, 'startswith'):
917 return value
917 return value
918 if value:
918 if value:
919 return 'yes'
919 return 'yes'
920 else:
920 else:
921 return 'no'
921 return 'no'
922 else:
922 else:
923 formatvalue = pycompat.identity
923 formatvalue = pycompat.identity
924
924
925 fm.plain('format-variant')
925 fm.plain('format-variant')
926 fm.plain(' ' * (maxvariantlength - len('format-variant')))
926 fm.plain(' ' * (maxvariantlength - len('format-variant')))
927 fm.plain(' repo')
927 fm.plain(' repo')
928 if ui.verbose:
928 if ui.verbose:
929 fm.plain(' config default')
929 fm.plain(' config default')
930 fm.plain('\n')
930 fm.plain('\n')
931 for fv in upgrade.allformatvariant:
931 for fv in upgrade.allformatvariant:
932 fm.startitem()
932 fm.startitem()
933 repovalue = fv.fromrepo(repo)
933 repovalue = fv.fromrepo(repo)
934 configvalue = fv.fromconfig(repo)
934 configvalue = fv.fromconfig(repo)
935
935
936 if repovalue != configvalue:
936 if repovalue != configvalue:
937 namelabel = 'formatvariant.name.mismatchconfig'
937 namelabel = 'formatvariant.name.mismatchconfig'
938 repolabel = 'formatvariant.repo.mismatchconfig'
938 repolabel = 'formatvariant.repo.mismatchconfig'
939 elif repovalue != fv.default:
939 elif repovalue != fv.default:
940 namelabel = 'formatvariant.name.mismatchdefault'
940 namelabel = 'formatvariant.name.mismatchdefault'
941 repolabel = 'formatvariant.repo.mismatchdefault'
941 repolabel = 'formatvariant.repo.mismatchdefault'
942 else:
942 else:
943 namelabel = 'formatvariant.name.uptodate'
943 namelabel = 'formatvariant.name.uptodate'
944 repolabel = 'formatvariant.repo.uptodate'
944 repolabel = 'formatvariant.repo.uptodate'
945
945
946 fm.write('name', makeformatname(fv.name), fv.name,
946 fm.write('name', makeformatname(fv.name), fv.name,
947 label=namelabel)
947 label=namelabel)
948 fm.write('repo', ' %3s', formatvalue(repovalue),
948 fm.write('repo', ' %3s', formatvalue(repovalue),
949 label=repolabel)
949 label=repolabel)
950 if fv.default != configvalue:
950 if fv.default != configvalue:
951 configlabel = 'formatvariant.config.special'
951 configlabel = 'formatvariant.config.special'
952 else:
952 else:
953 configlabel = 'formatvariant.config.default'
953 configlabel = 'formatvariant.config.default'
954 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
954 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
955 label=configlabel)
955 label=configlabel)
956 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
956 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
957 label='formatvariant.default')
957 label='formatvariant.default')
958 fm.plain('\n')
958 fm.plain('\n')
959 fm.end()
959 fm.end()
960
960
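# Rough sketch of the plain-mode --verbose table assembled above; the variant
# names and yes/no values are examples, real rows come from
# upgrade.allformatvariant and the repository at hand (column widths are
# approximate here):
#
#   format-variant repo config default
#   fncache:        yes    yes     yes
#   generaldelta:   yes    yes     yes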
961 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
961 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
962 def debugfsinfo(ui, path="."):
962 def debugfsinfo(ui, path="."):
963 """show information detected about current filesystem"""
963 """show information detected about current filesystem"""
964 ui.write(('path: %s\n') % path)
964 ui.write(('path: %s\n') % path)
965 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
965 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
966 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
966 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
967 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
967 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
968 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
968 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
969 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
969 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
970 casesensitive = '(unknown)'
970 casesensitive = '(unknown)'
971 try:
971 try:
972 with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
972 with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
973 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
973 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
974 except OSError:
974 except OSError:
975 pass
975 pass
976 ui.write(('case-sensitive: %s\n') % casesensitive)
976 ui.write(('case-sensitive: %s\n') % casesensitive)
977
977
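# Illustrative output of debugfsinfo on a case-sensitive ext4 checkout; every
# value below is hypothetical and depends on the filesystem being probed:
#
#   path: .
#   mounted on: /
#   exec: yes
#   fstype: ext4
#   symlink: yes
#   hardlink: yes
#   case-sensitive: yes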
978 @command('debuggetbundle',
978 @command('debuggetbundle',
979 [('H', 'head', [], _('id of head node'), _('ID')),
979 [('H', 'head', [], _('id of head node'), _('ID')),
980 ('C', 'common', [], _('id of common node'), _('ID')),
980 ('C', 'common', [], _('id of common node'), _('ID')),
981 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
981 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
982 _('REPO FILE [-H|-C ID]...'),
982 _('REPO FILE [-H|-C ID]...'),
983 norepo=True)
983 norepo=True)
984 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
984 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
985 """retrieves a bundle from a repo
985 """retrieves a bundle from a repo
986
986
987 Every ID must be a full-length hex node id string. Saves the bundle to the
987 Every ID must be a full-length hex node id string. Saves the bundle to the
988 given file.
988 given file.
989 """
989 """
990 opts = pycompat.byteskwargs(opts)
990 opts = pycompat.byteskwargs(opts)
991 repo = hg.peer(ui, opts, repopath)
991 repo = hg.peer(ui, opts, repopath)
992 if not repo.capable('getbundle'):
992 if not repo.capable('getbundle'):
993 raise error.Abort("getbundle() not supported by target repository")
993 raise error.Abort("getbundle() not supported by target repository")
994 args = {}
994 args = {}
995 if common:
995 if common:
996 args[r'common'] = [bin(s) for s in common]
996 args[r'common'] = [bin(s) for s in common]
997 if head:
997 if head:
998 args[r'heads'] = [bin(s) for s in head]
998 args[r'heads'] = [bin(s) for s in head]
999 # TODO: get desired bundlecaps from command line.
999 # TODO: get desired bundlecaps from command line.
1000 args[r'bundlecaps'] = None
1000 args[r'bundlecaps'] = None
1001 bundle = repo.getbundle('debug', **args)
1001 bundle = repo.getbundle('debug', **args)
1002
1002
1003 bundletype = opts.get('type', 'bzip2').lower()
1003 bundletype = opts.get('type', 'bzip2').lower()
1004 btypes = {'none': 'HG10UN',
1004 btypes = {'none': 'HG10UN',
1005 'bzip2': 'HG10BZ',
1005 'bzip2': 'HG10BZ',
1006 'gzip': 'HG10GZ',
1006 'gzip': 'HG10GZ',
1007 'bundle2': 'HG20'}
1007 'bundle2': 'HG20'}
1008 bundletype = btypes.get(bundletype)
1008 bundletype = btypes.get(bundletype)
1009 if bundletype not in bundle2.bundletypes:
1009 if bundletype not in bundle2.bundletypes:
1010 raise error.Abort(_('unknown bundle type specified with --type'))
1010 raise error.Abort(_('unknown bundle type specified with --type'))
1011 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1011 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1012
1012
1013 @command('debugignore', [], '[FILE]')
1013 @command('debugignore', [], '[FILE]')
1014 def debugignore(ui, repo, *files, **opts):
1014 def debugignore(ui, repo, *files, **opts):
1015 """display the combined ignore pattern and information about ignored files
1015 """display the combined ignore pattern and information about ignored files
1016
1016
1017 With no argument, displays the combined ignore pattern.
1017 With no argument, displays the combined ignore pattern.
1018
1018
1019 Given space-separated file names, shows if the given file is ignored and,
1019 Given space-separated file names, shows if the given file is ignored and,
1020 if so, shows the ignore rule (file and line number) that matched it.
1020 if so, shows the ignore rule (file and line number) that matched it.
1021 """
1021 """
1022 ignore = repo.dirstate._ignore
1022 ignore = repo.dirstate._ignore
1023 if not files:
1023 if not files:
1024 # Show all the patterns
1024 # Show all the patterns
1025 ui.write("%s\n" % pycompat.byterepr(ignore))
1025 ui.write("%s\n" % pycompat.byterepr(ignore))
1026 else:
1026 else:
1027 m = scmutil.match(repo[None], pats=files)
1027 m = scmutil.match(repo[None], pats=files)
1028 for f in m.files():
1028 for f in m.files():
1029 nf = util.normpath(f)
1029 nf = util.normpath(f)
1030 ignored = None
1030 ignored = None
1031 ignoredata = None
1031 ignoredata = None
1032 if nf != '.':
1032 if nf != '.':
1033 if ignore(nf):
1033 if ignore(nf):
1034 ignored = nf
1034 ignored = nf
1035 ignoredata = repo.dirstate._ignorefileandline(nf)
1035 ignoredata = repo.dirstate._ignorefileandline(nf)
1036 else:
1036 else:
1037 for p in util.finddirs(nf):
1037 for p in util.finddirs(nf):
1038 if ignore(p):
1038 if ignore(p):
1039 ignored = p
1039 ignored = p
1040 ignoredata = repo.dirstate._ignorefileandline(p)
1040 ignoredata = repo.dirstate._ignorefileandline(p)
1041 break
1041 break
1042 if ignored:
1042 if ignored:
1043 if ignored == nf:
1043 if ignored == nf:
1044 ui.write(_("%s is ignored\n") % m.uipath(f))
1044 ui.write(_("%s is ignored\n") % m.uipath(f))
1045 else:
1045 else:
1046 ui.write(_("%s is ignored because of "
1046 ui.write(_("%s is ignored because of "
1047 "containing folder %s\n")
1047 "containing folder %s\n")
1048 % (m.uipath(f), ignored))
1048 % (m.uipath(f), ignored))
1049 ignorefile, lineno, line = ignoredata
1049 ignorefile, lineno, line = ignoredata
1050 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1050 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1051 % (ignorefile, lineno, line))
1051 % (ignorefile, lineno, line))
1052 else:
1052 else:
1053 ui.write(_("%s is not ignored\n") % m.uipath(f))
1053 ui.write(_("%s is not ignored\n") % m.uipath(f))
1054
1054
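# Hypothetical session with debugignore; the file name and the ignore rule are
# placeholders for whatever the repository's .hgignore actually contains:
#
#   $ hg debugignore src/main.o
#   src/main.o is ignored
#   (ignore rule in .hgignore, line 1: '*.o')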
1055 @command('debugindex', cmdutil.debugrevlogopts +
1055 @command('debugindex', cmdutil.debugrevlogopts +
1056 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
1056 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
1057 _('[-f FORMAT] -c|-m|FILE'),
1057 _('[-f FORMAT] -c|-m|FILE'),
1058 optionalrepo=True)
1058 optionalrepo=True)
1059 def debugindex(ui, repo, file_=None, **opts):
1059 def debugindex(ui, repo, file_=None, **opts):
1060 """dump the contents of an index file"""
1060 """dump the contents of an index file"""
1061 opts = pycompat.byteskwargs(opts)
1061 opts = pycompat.byteskwargs(opts)
1062 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
1062 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
1063 format = opts.get('format', 0)
1063 format = opts.get('format', 0)
1064 if format not in (0, 1):
1064 if format not in (0, 1):
1065 raise error.Abort(_("unknown format %d") % format)
1065 raise error.Abort(_("unknown format %d") % format)
1066
1066
1067 if ui.debugflag:
1067 if ui.debugflag:
1068 shortfn = hex
1068 shortfn = hex
1069 else:
1069 else:
1070 shortfn = short
1070 shortfn = short
1071
1071
1072 # There might not be anything in r, so have a sane default
1072 # There might not be anything in r, so have a sane default
1073 idlen = 12
1073 idlen = 12
1074 for i in r:
1074 for i in r:
1075 idlen = len(shortfn(r.node(i)))
1075 idlen = len(shortfn(r.node(i)))
1076 break
1076 break
1077
1077
1078 if format == 0:
1078 if format == 0:
1079 if ui.verbose:
1079 if ui.verbose:
1080 ui.write((" rev offset length linkrev"
1080 ui.write((" rev offset length linkrev"
1081 " %s %s p2\n") % ("nodeid".ljust(idlen),
1081 " %s %s p2\n") % ("nodeid".ljust(idlen),
1082 "p1".ljust(idlen)))
1082 "p1".ljust(idlen)))
1083 else:
1083 else:
1084 ui.write((" rev linkrev %s %s p2\n") % (
1084 ui.write((" rev linkrev %s %s p2\n") % (
1085 "nodeid".ljust(idlen), "p1".ljust(idlen)))
1085 "nodeid".ljust(idlen), "p1".ljust(idlen)))
1086 elif format == 1:
1086 elif format == 1:
1087 if ui.verbose:
1087 if ui.verbose:
1088 ui.write((" rev flag offset length size link p1"
1088 ui.write((" rev flag offset length size link p1"
1089 " p2 %s\n") % "nodeid".rjust(idlen))
1089 " p2 %s\n") % "nodeid".rjust(idlen))
1090 else:
1090 else:
1091 ui.write((" rev flag size link p1 p2 %s\n") %
1091 ui.write((" rev flag size link p1 p2 %s\n") %
1092 "nodeid".rjust(idlen))
1092 "nodeid".rjust(idlen))
1093
1093
1094 for i in r:
1094 for i in r:
1095 node = r.node(i)
1095 node = r.node(i)
1096 if format == 0:
1096 if format == 0:
1097 try:
1097 try:
1098 pp = r.parents(node)
1098 pp = r.parents(node)
1099 except Exception:
1099 except Exception:
1100 pp = [nullid, nullid]
1100 pp = [nullid, nullid]
1101 if ui.verbose:
1101 if ui.verbose:
1102 ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
1102 ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
1103 i, r.start(i), r.length(i), r.linkrev(i),
1103 i, r.start(i), r.length(i), r.linkrev(i),
1104 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
1104 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
1105 else:
1105 else:
1106 ui.write("% 6d % 7d %s %s %s\n" % (
1106 ui.write("% 6d % 7d %s %s %s\n" % (
1107 i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
1107 i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
1108 shortfn(pp[1])))
1108 shortfn(pp[1])))
1109 elif format == 1:
1109 elif format == 1:
1110 pr = r.parentrevs(i)
1110 pr = r.parentrevs(i)
1111 if ui.verbose:
1111 if ui.verbose:
1112 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
1112 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
1113 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
1113 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
1114 r.linkrev(i), pr[0], pr[1], shortfn(node)))
1114 r.linkrev(i), pr[0], pr[1], shortfn(node)))
1115 else:
1115 else:
1116 ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
1116 ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
1117 i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
1117 i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
1118 shortfn(node)))
1118 shortfn(node)))
1119
1119
1120 @command('debugindexdot', cmdutil.debugrevlogopts,
1120 @command('debugindexdot', cmdutil.debugrevlogopts,
1121 _('-c|-m|FILE'), optionalrepo=True)
1121 _('-c|-m|FILE'), optionalrepo=True)
1122 def debugindexdot(ui, repo, file_=None, **opts):
1122 def debugindexdot(ui, repo, file_=None, **opts):
1123 """dump an index DAG as a graphviz dot file"""
1123 """dump an index DAG as a graphviz dot file"""
1124 opts = pycompat.byteskwargs(opts)
1124 opts = pycompat.byteskwargs(opts)
1125 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
1125 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
1126 ui.write(("digraph G {\n"))
1126 ui.write(("digraph G {\n"))
1127 for i in r:
1127 for i in r:
1128 node = r.node(i)
1128 node = r.node(i)
1129 pp = r.parents(node)
1129 pp = r.parents(node)
1130 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1130 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1131 if pp[1] != nullid:
1131 if pp[1] != nullid:
1132 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1132 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1133 ui.write("}\n")
1133 ui.write("}\n")
1134
1134
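# For a repository with three linear revisions, the function above would emit
# a graphviz digraph along these lines (the "-1 -> 0" edge is the null parent
# of revision 0; exact whitespace comes from the "\t%d -> %d" format):
#
#   digraph G {
#       -1 -> 0
#       0 -> 1
#       1 -> 2
#   }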
1135 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1135 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1136 def debuginstall(ui, **opts):
1136 def debuginstall(ui, **opts):
1137 '''test Mercurial installation
1137 '''test Mercurial installation
1138
1138
1139 Returns 0 on success.
1139 Returns 0 on success.
1140 '''
1140 '''
1141 opts = pycompat.byteskwargs(opts)
1141 opts = pycompat.byteskwargs(opts)
1142
1142
1143 def writetemp(contents):
1143 def writetemp(contents):
1144 (fd, name) = pycompat.mkstemp(prefix="hg-debuginstall-")
1144 (fd, name) = pycompat.mkstemp(prefix="hg-debuginstall-")
1145 f = os.fdopen(fd, r"wb")
1145 f = os.fdopen(fd, r"wb")
1146 f.write(contents)
1146 f.write(contents)
1147 f.close()
1147 f.close()
1148 return name
1148 return name
1149
1149
1150 problems = 0
1150 problems = 0
1151
1151
1152 fm = ui.formatter('debuginstall', opts)
1152 fm = ui.formatter('debuginstall', opts)
1153 fm.startitem()
1153 fm.startitem()
1154
1154
1155 # encoding
1155 # encoding
1156 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1156 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1157 err = None
1157 err = None
1158 try:
1158 try:
1159 codecs.lookup(pycompat.sysstr(encoding.encoding))
1159 codecs.lookup(pycompat.sysstr(encoding.encoding))
1160 except LookupError as inst:
1160 except LookupError as inst:
1161 err = stringutil.forcebytestr(inst)
1161 err = stringutil.forcebytestr(inst)
1162 problems += 1
1162 problems += 1
1163 fm.condwrite(err, 'encodingerror', _(" %s\n"
1163 fm.condwrite(err, 'encodingerror', _(" %s\n"
1164 " (check that your locale is properly set)\n"), err)
1164 " (check that your locale is properly set)\n"), err)
1165
1165
1166 # Python
1166 # Python
1167 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1167 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1168 pycompat.sysexecutable)
1168 pycompat.sysexecutable)
1169 fm.write('pythonver', _("checking Python version (%s)\n"),
1169 fm.write('pythonver', _("checking Python version (%s)\n"),
1170 ("%d.%d.%d" % sys.version_info[:3]))
1170 ("%d.%d.%d" % sys.version_info[:3]))
1171 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1171 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1172 os.path.dirname(pycompat.fsencode(os.__file__)))
1172 os.path.dirname(pycompat.fsencode(os.__file__)))
1173
1173
1174 security = set(sslutil.supportedprotocols)
1174 security = set(sslutil.supportedprotocols)
1175 if sslutil.hassni:
1175 if sslutil.hassni:
1176 security.add('sni')
1176 security.add('sni')
1177
1177
1178 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1178 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1179 fm.formatlist(sorted(security), name='protocol',
1179 fm.formatlist(sorted(security), name='protocol',
1180 fmt='%s', sep=','))
1180 fmt='%s', sep=','))
1181
1181
1182 # These are warnings, not errors. So don't increment problem count. This
1182 # These are warnings, not errors. So don't increment problem count. This
1183 # may change in the future.
1183 # may change in the future.
1184 if 'tls1.2' not in security:
1184 if 'tls1.2' not in security:
1185 fm.plain(_(' TLS 1.2 not supported by Python install; '
1185 fm.plain(_(' TLS 1.2 not supported by Python install; '
1186 'network connections lack modern security\n'))
1186 'network connections lack modern security\n'))
1187 if 'sni' not in security:
1187 if 'sni' not in security:
1188 fm.plain(_(' SNI not supported by Python install; may have '
1188 fm.plain(_(' SNI not supported by Python install; may have '
1189 'connectivity issues with some servers\n'))
1189 'connectivity issues with some servers\n'))
1190
1190
1191 # TODO print CA cert info
1191 # TODO print CA cert info
1192
1192
1193 # hg version
1193 # hg version
1194 hgver = util.version()
1194 hgver = util.version()
1195 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1195 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1196 hgver.split('+')[0])
1196 hgver.split('+')[0])
1197 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1197 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1198 '+'.join(hgver.split('+')[1:]))
1198 '+'.join(hgver.split('+')[1:]))
1199
1199
1200 # compiled modules
1200 # compiled modules
1201 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1201 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1202 policy.policy)
1202 policy.policy)
1203 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1203 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1204 os.path.dirname(pycompat.fsencode(__file__)))
1204 os.path.dirname(pycompat.fsencode(__file__)))
1205
1205
1206 if policy.policy in ('c', 'allow'):
1206 if policy.policy in ('c', 'allow'):
1207 err = None
1207 err = None
1208 try:
1208 try:
1209 from .cext import (
1209 from .cext import (
1210 base85,
1210 base85,
1211 bdiff,
1211 bdiff,
1212 mpatch,
1212 mpatch,
1213 osutil,
1213 osutil,
1214 )
1214 )
1215 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1215 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1216 except Exception as inst:
1216 except Exception as inst:
1217 err = stringutil.forcebytestr(inst)
1217 err = stringutil.forcebytestr(inst)
1218 problems += 1
1218 problems += 1
1219 fm.condwrite(err, 'extensionserror', " %s\n", err)
1219 fm.condwrite(err, 'extensionserror', " %s\n", err)
1220
1220
1221 compengines = util.compengines._engines.values()
1221 compengines = util.compengines._engines.values()
1222 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1222 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1223 fm.formatlist(sorted(e.name() for e in compengines),
1223 fm.formatlist(sorted(e.name() for e in compengines),
1224 name='compengine', fmt='%s', sep=', '))
1224 name='compengine', fmt='%s', sep=', '))
1225 fm.write('compenginesavail', _('checking available compression engines '
1225 fm.write('compenginesavail', _('checking available compression engines '
1226 '(%s)\n'),
1226 '(%s)\n'),
1227 fm.formatlist(sorted(e.name() for e in compengines
1227 fm.formatlist(sorted(e.name() for e in compengines
1228 if e.available()),
1228 if e.available()),
1229 name='compengine', fmt='%s', sep=', '))
1229 name='compengine', fmt='%s', sep=', '))
1230 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1230 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1231 fm.write('compenginesserver', _('checking available compression engines '
1231 fm.write('compenginesserver', _('checking available compression engines '
1232 'for wire protocol (%s)\n'),
1232 'for wire protocol (%s)\n'),
1233 fm.formatlist([e.name() for e in wirecompengines
1233 fm.formatlist([e.name() for e in wirecompengines
1234 if e.wireprotosupport()],
1234 if e.wireprotosupport()],
1235 name='compengine', fmt='%s', sep=', '))
1235 name='compengine', fmt='%s', sep=', '))
1236 re2 = 'missing'
1236 re2 = 'missing'
1237 if util._re2:
1237 if util._re2:
1238 re2 = 'available'
1238 re2 = 'available'
1239 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1239 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1240 fm.data(re2=bool(util._re2))
1240 fm.data(re2=bool(util._re2))
1241
1241
1242 # templates
1242 # templates
1243 p = templater.templatepaths()
1243 p = templater.templatepaths()
1244 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1244 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1245 fm.condwrite(not p, '', _(" no template directories found\n"))
1245 fm.condwrite(not p, '', _(" no template directories found\n"))
1246 if p:
1246 if p:
1247 m = templater.templatepath("map-cmdline.default")
1247 m = templater.templatepath("map-cmdline.default")
1248 if m:
1248 if m:
1249 # template found, check if it is working
1249 # template found, check if it is working
1250 err = None
1250 err = None
1251 try:
1251 try:
1252 templater.templater.frommapfile(m)
1252 templater.templater.frommapfile(m)
1253 except Exception as inst:
1253 except Exception as inst:
1254 err = stringutil.forcebytestr(inst)
1254 err = stringutil.forcebytestr(inst)
1255 p = None
1255 p = None
1256 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1256 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1257 else:
1257 else:
1258 p = None
1258 p = None
1259 fm.condwrite(p, 'defaulttemplate',
1259 fm.condwrite(p, 'defaulttemplate',
1260 _("checking default template (%s)\n"), m)
1260 _("checking default template (%s)\n"), m)
1261 fm.condwrite(not m, 'defaulttemplatenotfound',
1261 fm.condwrite(not m, 'defaulttemplatenotfound',
1262 _(" template '%s' not found\n"), "default")
1262 _(" template '%s' not found\n"), "default")
1263 if not p:
1263 if not p:
1264 problems += 1
1264 problems += 1
1265 fm.condwrite(not p, '',
1265 fm.condwrite(not p, '',
1266 _(" (templates seem to have been installed incorrectly)\n"))
1266 _(" (templates seem to have been installed incorrectly)\n"))
1267
1267
1268 # editor
1268 # editor
1269 editor = ui.geteditor()
1269 editor = ui.geteditor()
1270 editor = util.expandpath(editor)
1270 editor = util.expandpath(editor)
1271 editorbin = procutil.shellsplit(editor)[0]
1271 editorbin = procutil.shellsplit(editor)[0]
1272 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1272 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1273 cmdpath = procutil.findexe(editorbin)
1273 cmdpath = procutil.findexe(editorbin)
1274 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1274 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1275 _(" No commit editor set and can't find %s in PATH\n"
1275 _(" No commit editor set and can't find %s in PATH\n"
1276 " (specify a commit editor in your configuration"
1276 " (specify a commit editor in your configuration"
1277 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1277 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1278 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1278 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1279 _(" Can't find editor '%s' in PATH\n"
1279 _(" Can't find editor '%s' in PATH\n"
1280 " (specify a commit editor in your configuration"
1280 " (specify a commit editor in your configuration"
1281 " file)\n"), not cmdpath and editorbin)
1281 " file)\n"), not cmdpath and editorbin)
1282 if not cmdpath and editor != 'vi':
1282 if not cmdpath and editor != 'vi':
1283 problems += 1
1283 problems += 1
1284
1284
1285 # check username
1285 # check username
1286 username = None
1286 username = None
1287 err = None
1287 err = None
1288 try:
1288 try:
1289 username = ui.username()
1289 username = ui.username()
1290 except error.Abort as e:
1290 except error.Abort as e:
1291 err = stringutil.forcebytestr(e)
1291 err = stringutil.forcebytestr(e)
1292 problems += 1
1292 problems += 1
1293
1293
1294 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1294 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1295 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1295 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1296 " (specify a username in your configuration file)\n"), err)
1296 " (specify a username in your configuration file)\n"), err)
1297
1297
1298 fm.condwrite(not problems, '',
1298 fm.condwrite(not problems, '',
1299 _("no problems detected\n"))
1299 _("no problems detected\n"))
1300 if not problems:
1300 if not problems:
1301 fm.data(problems=problems)
1301 fm.data(problems=problems)
1302 fm.condwrite(problems, 'problems',
1302 fm.condwrite(problems, 'problems',
1303 _("%d problems detected,"
1303 _("%d problems detected,"
1304 " please check your install!\n"), problems)
1304 " please check your install!\n"), problems)
1305 fm.end()
1305 fm.end()
1306
1306
1307 return problems
1307 return problems
1308
1308
1309 @command('debugknown', [], _('REPO ID...'), norepo=True)
1309 @command('debugknown', [], _('REPO ID...'), norepo=True)
1310 def debugknown(ui, repopath, *ids, **opts):
1310 def debugknown(ui, repopath, *ids, **opts):
1311 """test whether node ids are known to a repo
1311 """test whether node ids are known to a repo
1312
1312
1313 Every ID must be a full-length hex node id string. Returns a list of 0s
1313 Every ID must be a full-length hex node id string. Returns a list of 0s
1314 and 1s indicating unknown/known.
1314 and 1s indicating unknown/known.
1315 """
1315 """
1316 opts = pycompat.byteskwargs(opts)
1316 opts = pycompat.byteskwargs(opts)
1317 repo = hg.peer(ui, opts, repopath)
1317 repo = hg.peer(ui, opts, repopath)
1318 if not repo.capable('known'):
1318 if not repo.capable('known'):
1319 raise error.Abort("known() not supported by target repository")
1319 raise error.Abort("known() not supported by target repository")
1320 flags = repo.known([bin(s) for s in ids])
1320 flags = repo.known([bin(s) for s in ids])
1321 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1321 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1322
1322
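# Hypothetical run of debugknown; the IDs stand in for full 40-character hex
# node ids, and the reply is one digit per queried node (1 = known, 0 = not):
#
#   $ hg debugknown ssh://example.com/repo <known-id> <unknown-id>
#   10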
1323 @command('debuglabelcomplete', [], _('LABEL...'))
1323 @command('debuglabelcomplete', [], _('LABEL...'))
1324 def debuglabelcomplete(ui, repo, *args):
1324 def debuglabelcomplete(ui, repo, *args):
1325 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1325 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1326 debugnamecomplete(ui, repo, *args)
1326 debugnamecomplete(ui, repo, *args)
1327
1327
1328 @command('debuglocks',
1328 @command('debuglocks',
1329 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1329 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1330 ('W', 'force-wlock', None,
1330 ('W', 'force-wlock', None,
1331 _('free the working state lock (DANGEROUS)')),
1331 _('free the working state lock (DANGEROUS)')),
1332 ('s', 'set-lock', None, _('set the store lock until stopped')),
1332 ('s', 'set-lock', None, _('set the store lock until stopped')),
1333 ('S', 'set-wlock', None,
1333 ('S', 'set-wlock', None,
1334 _('set the working state lock until stopped'))],
1334 _('set the working state lock until stopped'))],
1335 _('[OPTION]...'))
1335 _('[OPTION]...'))
1336 def debuglocks(ui, repo, **opts):
1336 def debuglocks(ui, repo, **opts):
1337 """show or modify state of locks
1337 """show or modify state of locks
1338
1338
1339 By default, this command will show which locks are held. This
1339 By default, this command will show which locks are held. This
1340 includes the user and process holding the lock, the amount of time
1340 includes the user and process holding the lock, the amount of time
1341 the lock has been held, and the machine name where the process is
1341 the lock has been held, and the machine name where the process is
1342 running if it's not local.
1342 running if it's not local.
1343
1343
1344 Locks protect the integrity of Mercurial's data, so they should be
1344 Locks protect the integrity of Mercurial's data, so they should be
1345 treated with care. System crashes or other interruptions may cause
1345 treated with care. System crashes or other interruptions may cause
1346 locks to not be properly released, though Mercurial will usually
1346 locks to not be properly released, though Mercurial will usually
1347 detect and remove such stale locks automatically.
1347 detect and remove such stale locks automatically.
1348
1348
1349 However, detecting stale locks may not always be possible (for
1349 However, detecting stale locks may not always be possible (for
1350 instance, on a shared filesystem). Removing locks may also be
1350 instance, on a shared filesystem). Removing locks may also be
1351 blocked by filesystem permissions.
1351 blocked by filesystem permissions.
1352
1352
1353 Setting a lock will prevent other commands from changing the data.
1353 Setting a lock will prevent other commands from changing the data.
1354 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1354 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1355 The set locks are removed when the command exits.
1355 The set locks are removed when the command exits.
1356
1356
1357 Returns 0 if no locks are held.
1357 Returns 0 if no locks are held.
1358
1358
1359 """
1359 """
1360
1360
1361 if opts.get(r'force_lock'):
1361 if opts.get(r'force_lock'):
1362 repo.svfs.unlink('lock')
1362 repo.svfs.unlink('lock')
1363 if opts.get(r'force_wlock'):
1363 if opts.get(r'force_wlock'):
1364 repo.vfs.unlink('wlock')
1364 repo.vfs.unlink('wlock')
1365 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1365 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1366 return 0
1366 return 0
1367
1367
1368 locks = []
1368 locks = []
1369 try:
1369 try:
1370 if opts.get(r'set_wlock'):
1370 if opts.get(r'set_wlock'):
1371 try:
1371 try:
1372 locks.append(repo.wlock(False))
1372 locks.append(repo.wlock(False))
1373 except error.LockHeld:
1373 except error.LockHeld:
1374 raise error.Abort(_('wlock is already held'))
1374 raise error.Abort(_('wlock is already held'))
1375 if opts.get(r'set_lock'):
1375 if opts.get(r'set_lock'):
1376 try:
1376 try:
1377 locks.append(repo.lock(False))
1377 locks.append(repo.lock(False))
1378 except error.LockHeld:
1378 except error.LockHeld:
1379 raise error.Abort(_('lock is already held'))
1379 raise error.Abort(_('lock is already held'))
1380 if len(locks):
1380 if len(locks):
1381 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1381 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1382 return 0
1382 return 0
1383 finally:
1383 finally:
1384 release(*locks)
1384 release(*locks)
1385
1385
1386 now = time.time()
1386 now = time.time()
1387 held = 0
1387 held = 0
1388
1388
1389 def report(vfs, name, method):
1389 def report(vfs, name, method):
1390 # this causes stale locks to get reaped for more accurate reporting
1390 # this causes stale locks to get reaped for more accurate reporting
1391 try:
1391 try:
1392 l = method(False)
1392 l = method(False)
1393 except error.LockHeld:
1393 except error.LockHeld:
1394 l = None
1394 l = None
1395
1395
1396 if l:
1396 if l:
1397 l.release()
1397 l.release()
1398 else:
1398 else:
1399 try:
1399 try:
1400 st = vfs.lstat(name)
1400 st = vfs.lstat(name)
1401 age = now - st[stat.ST_MTIME]
1401 age = now - st[stat.ST_MTIME]
1402 user = util.username(st.st_uid)
1402 user = util.username(st.st_uid)
1403 locker = vfs.readlock(name)
1403 locker = vfs.readlock(name)
1404 if ":" in locker:
1404 if ":" in locker:
1405 host, pid = locker.split(':')
1405 host, pid = locker.split(':')
1406 if host == socket.gethostname():
1406 if host == socket.gethostname():
1407 locker = 'user %s, process %s' % (user, pid)
1407 locker = 'user %s, process %s' % (user, pid)
1408 else:
1408 else:
1409 locker = 'user %s, process %s, host %s' \
1409 locker = 'user %s, process %s, host %s' \
1410 % (user, pid, host)
1410 % (user, pid, host)
1411 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1411 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1412 return 1
1412 return 1
1413 except OSError as e:
1413 except OSError as e:
1414 if e.errno != errno.ENOENT:
1414 if e.errno != errno.ENOENT:
1415 raise
1415 raise
1416
1416
1417 ui.write(("%-6s free\n") % (name + ":"))
1417 ui.write(("%-6s free\n") % (name + ":"))
1418 return 0
1418 return 0
1419
1419
1420 held += report(repo.svfs, "lock", repo.lock)
1420 held += report(repo.svfs, "lock", repo.lock)
1421 held += report(repo.vfs, "wlock", repo.wlock)
1421 held += report(repo.vfs, "wlock", repo.wlock)
1422
1422
1423 return held
1423 return held
1424
1424
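# Example of the report() output above when the store lock is held by another
# process on the same host and the working-copy lock is free (user name, pid
# and age are made up):
#
#   lock:  user alice, process 2817 (327s)
#   wlock: free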
1425 @command('debugmergestate', [], '')
1425 @command('debugmergestate', [], '')
1426 def debugmergestate(ui, repo, *args):
1426 def debugmergestate(ui, repo, *args):
1427 """print merge state
1427 """print merge state
1428
1428
1429 Use --verbose to print out information about whether v1 or v2 merge state
1429 Use --verbose to print out information about whether v1 or v2 merge state
1430 was chosen."""
1430 was chosen."""
1431 def _hashornull(h):
1431 def _hashornull(h):
1432 if h == nullhex:
1432 if h == nullhex:
1433 return 'null'
1433 return 'null'
1434 else:
1434 else:
1435 return h
1435 return h
1436
1436
1437 def printrecords(version):
1437 def printrecords(version):
1438 ui.write(('* version %d records\n') % version)
1438 ui.write(('* version %d records\n') % version)
1439 if version == 1:
1439 if version == 1:
1440 records = v1records
1440 records = v1records
1441 else:
1441 else:
1442 records = v2records
1442 records = v2records
1443
1443
1444 for rtype, record in records:
1444 for rtype, record in records:
1445 # pretty print some record types
1445 # pretty print some record types
1446 if rtype == 'L':
1446 if rtype == 'L':
1447 ui.write(('local: %s\n') % record)
1447 ui.write(('local: %s\n') % record)
1448 elif rtype == 'O':
1448 elif rtype == 'O':
1449 ui.write(('other: %s\n') % record)
1449 ui.write(('other: %s\n') % record)
1450 elif rtype == 'm':
1450 elif rtype == 'm':
1451 driver, mdstate = record.split('\0', 1)
1451 driver, mdstate = record.split('\0', 1)
1452 ui.write(('merge driver: %s (state "%s")\n')
1452 ui.write(('merge driver: %s (state "%s")\n')
1453 % (driver, mdstate))
1453 % (driver, mdstate))
1454 elif rtype in 'FDC':
1454 elif rtype in 'FDC':
1455 r = record.split('\0')
1455 r = record.split('\0')
1456 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1456 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1457 if version == 1:
1457 if version == 1:
1458 onode = 'not stored in v1 format'
1458 onode = 'not stored in v1 format'
1459 flags = r[7]
1459 flags = r[7]
1460 else:
1460 else:
1461 onode, flags = r[7:9]
1461 onode, flags = r[7:9]
1462 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1462 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1463 % (f, rtype, state, _hashornull(hash)))
1463 % (f, rtype, state, _hashornull(hash)))
1464 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1464 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1465 ui.write((' ancestor path: %s (node %s)\n')
1465 ui.write((' ancestor path: %s (node %s)\n')
1466 % (afile, _hashornull(anode)))
1466 % (afile, _hashornull(anode)))
1467 ui.write((' other path: %s (node %s)\n')
1467 ui.write((' other path: %s (node %s)\n')
1468 % (ofile, _hashornull(onode)))
1468 % (ofile, _hashornull(onode)))
1469 elif rtype == 'f':
1469 elif rtype == 'f':
1470 filename, rawextras = record.split('\0', 1)
1470 filename, rawextras = record.split('\0', 1)
1471 extras = rawextras.split('\0')
1471 extras = rawextras.split('\0')
1472 i = 0
1472 i = 0
1473 extrastrings = []
1473 extrastrings = []
1474 while i < len(extras):
1474 while i < len(extras):
1475 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1475 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1476 i += 2
1476 i += 2
1477
1477
1478 ui.write(('file extras: %s (%s)\n')
1478 ui.write(('file extras: %s (%s)\n')
1479 % (filename, ', '.join(extrastrings)))
1479 % (filename, ', '.join(extrastrings)))
1480 elif rtype == 'l':
1480 elif rtype == 'l':
1481 labels = record.split('\0', 2)
1481 labels = record.split('\0', 2)
1482 labels = [l for l in labels if len(l) > 0]
1482 labels = [l for l in labels if len(l) > 0]
1483 ui.write(('labels:\n'))
1483 ui.write(('labels:\n'))
1484 ui.write((' local: %s\n' % labels[0]))
1484 ui.write((' local: %s\n' % labels[0]))
1485 ui.write((' other: %s\n' % labels[1]))
1485 ui.write((' other: %s\n' % labels[1]))
1486 if len(labels) > 2:
1486 if len(labels) > 2:
1487 ui.write((' base: %s\n' % labels[2]))
1487 ui.write((' base: %s\n' % labels[2]))
1488 else:
1488 else:
1489 ui.write(('unrecognized entry: %s\t%s\n')
1489 ui.write(('unrecognized entry: %s\t%s\n')
1490 % (rtype, record.replace('\0', '\t')))
1490 % (rtype, record.replace('\0', '\t')))
1491
1491
1492 # Avoid mergestate.read() since it may raise an exception for unsupported
1492 # Avoid mergestate.read() since it may raise an exception for unsupported
1493 # merge state records. We shouldn't be doing this, but this is OK since this
1493 # merge state records. We shouldn't be doing this, but this is OK since this
1494 # command is pretty low-level.
1494 # command is pretty low-level.
1495 ms = mergemod.mergestate(repo)
1495 ms = mergemod.mergestate(repo)
1496
1496
1497 # sort so that reasonable information is on top
1497 # sort so that reasonable information is on top
1498 v1records = ms._readrecordsv1()
1498 v1records = ms._readrecordsv1()
1499 v2records = ms._readrecordsv2()
1499 v2records = ms._readrecordsv2()
1500 order = 'LOml'
1500 order = 'LOml'
1501 def key(r):
1501 def key(r):
1502 idx = order.find(r[0])
1502 idx = order.find(r[0])
1503 if idx == -1:
1503 if idx == -1:
1504 return (1, r[1])
1504 return (1, r[1])
1505 else:
1505 else:
1506 return (0, idx)
1506 return (0, idx)
1507 v1records.sort(key=key)
1507 v1records.sort(key=key)
1508 v2records.sort(key=key)
1508 v2records.sort(key=key)
1509
1509
1510 if not v1records and not v2records:
1510 if not v1records and not v2records:
1511 ui.write(('no merge state found\n'))
1511 ui.write(('no merge state found\n'))
1512 elif not v2records:
1512 elif not v2records:
1513 ui.note(('no version 2 merge state\n'))
1513 ui.note(('no version 2 merge state\n'))
1514 printrecords(1)
1514 printrecords(1)
1515 elif ms._v1v2match(v1records, v2records):
1515 elif ms._v1v2match(v1records, v2records):
1516 ui.note(('v1 and v2 states match: using v2\n'))
1516 ui.note(('v1 and v2 states match: using v2\n'))
1517 printrecords(2)
1517 printrecords(2)
1518 else:
1518 else:
1519 ui.note(('v1 and v2 states mismatch: using v1\n'))
1519 ui.note(('v1 and v2 states mismatch: using v1\n'))
1520 printrecords(1)
1520 printrecords(1)
1521 if ui.verbose:
1521 if ui.verbose:
1522 printrecords(2)
1522 printrecords(2)
1523
1523
1524 @command('debugnamecomplete', [], _('NAME...'))
1524 @command('debugnamecomplete', [], _('NAME...'))
1525 def debugnamecomplete(ui, repo, *args):
1525 def debugnamecomplete(ui, repo, *args):
1526 '''complete "names" - tags, open branch names, bookmark names'''
1526 '''complete "names" - tags, open branch names, bookmark names'''
1527
1527
1528 names = set()
1528 names = set()
1529 # since we previously only listed open branches, we will handle that
1529 # since we previously only listed open branches, we will handle that
1530 # specially (after this for loop)
1530 # specially (after this for loop)
1531 for name, ns in repo.names.iteritems():
1531 for name, ns in repo.names.iteritems():
1532 if name != 'branches':
1532 if name != 'branches':
1533 names.update(ns.listnames(repo))
1533 names.update(ns.listnames(repo))
1534 names.update(tag for (tag, heads, tip, closed)
1534 names.update(tag for (tag, heads, tip, closed)
1535 in repo.branchmap().iterbranches() if not closed)
1535 in repo.branchmap().iterbranches() if not closed)
1536 completions = set()
1536 completions = set()
1537 if not args:
1537 if not args:
1538 args = ['']
1538 args = ['']
1539 for a in args:
1539 for a in args:
1540 completions.update(n for n in names if n.startswith(a))
1540 completions.update(n for n in names if n.startswith(a))
1541 ui.write('\n'.join(sorted(completions)))
1541 ui.write('\n'.join(sorted(completions)))
1542 ui.write('\n')
1542 ui.write('\n')
1543
1543
1544 @command('debugobsolete',
1544 @command('debugobsolete',
1545 [('', 'flags', 0, _('markers flag')),
1545 [('', 'flags', 0, _('markers flag')),
1546 ('', 'record-parents', False,
1546 ('', 'record-parents', False,
1547 _('record parent information for the precursor')),
1547 _('record parent information for the precursor')),
1548 ('r', 'rev', [], _('display markers relevant to REV')),
1548 ('r', 'rev', [], _('display markers relevant to REV')),
1549 ('', 'exclusive', False, _('restrict display to markers only '
1549 ('', 'exclusive', False, _('restrict display to markers only '
1550 'relevant to REV')),
1550 'relevant to REV')),
1551 ('', 'index', False, _('display index of the marker')),
1551 ('', 'index', False, _('display index of the marker')),
1552 ('', 'delete', [], _('delete markers specified by indices')),
1552 ('', 'delete', [], _('delete markers specified by indices')),
1553 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1553 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1554 _('[OBSOLETED [REPLACEMENT ...]]'))
1554 _('[OBSOLETED [REPLACEMENT ...]]'))
1555 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1555 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1556 """create arbitrary obsolete marker
1556 """create arbitrary obsolete marker
1557
1557
1558 With no arguments, displays the list of obsolescence markers."""
1558 With no arguments, displays the list of obsolescence markers."""
1559
1559
1560 opts = pycompat.byteskwargs(opts)
1560 opts = pycompat.byteskwargs(opts)
1561
1561
1562 def parsenodeid(s):
1562 def parsenodeid(s):
1563 try:
1563 try:
1564 # We do not use revsingle/revrange functions here to accept
1564 # We do not use revsingle/revrange functions here to accept
1565 # arbitrary node identifiers, possibly not present in the
1565 # arbitrary node identifiers, possibly not present in the
1566 # local repository.
1566 # local repository.
1567 n = bin(s)
1567 n = bin(s)
1568 if len(n) != len(nullid):
1568 if len(n) != len(nullid):
1569 raise TypeError()
1569 raise TypeError()
1570 return n
1570 return n
1571 except TypeError:
1571 except TypeError:
1572 raise error.Abort('changeset references must be full hexadecimal '
1572 raise error.Abort('changeset references must be full hexadecimal '
1573 'node identifiers')
1573 'node identifiers')
1574
1574
1575 if opts.get('delete'):
1575 if opts.get('delete'):
1576 indices = []
1576 indices = []
1577 for v in opts.get('delete'):
1577 for v in opts.get('delete'):
1578 try:
1578 try:
1579 indices.append(int(v))
1579 indices.append(int(v))
1580 except ValueError:
1580 except ValueError:
1581 raise error.Abort(_('invalid index value: %r') % v,
1581 raise error.Abort(_('invalid index value: %r') % v,
1582 hint=_('use integers for indices'))
1582 hint=_('use integers for indices'))
1583
1583
1584 if repo.currenttransaction():
1584 if repo.currenttransaction():
1585 raise error.Abort(_('cannot delete obsmarkers in the middle '
1585 raise error.Abort(_('cannot delete obsmarkers in the middle '
1586 'of a transaction.'))
1586 'of a transaction.'))
1587
1587
1588 with repo.lock():
1588 with repo.lock():
1589 n = repair.deleteobsmarkers(repo.obsstore, indices)
1589 n = repair.deleteobsmarkers(repo.obsstore, indices)
1590 ui.write(_('deleted %i obsolescence markers\n') % n)
1590 ui.write(_('deleted %i obsolescence markers\n') % n)
1591
1591
1592 return
1592 return
1593
1593
1594 if precursor is not None:
1594 if precursor is not None:
1595 if opts['rev']:
1595 if opts['rev']:
1596 raise error.Abort('cannot select revision when creating marker')
1596 raise error.Abort('cannot select revision when creating marker')
1597 metadata = {}
1597 metadata = {}
1598 metadata['user'] = opts['user'] or ui.username()
1598 metadata['user'] = opts['user'] or ui.username()
1599 succs = tuple(parsenodeid(succ) for succ in successors)
1599 succs = tuple(parsenodeid(succ) for succ in successors)
1600 l = repo.lock()
1600 l = repo.lock()
1601 try:
1601 try:
1602 tr = repo.transaction('debugobsolete')
1602 tr = repo.transaction('debugobsolete')
1603 try:
1603 try:
1604 date = opts.get('date')
1604 date = opts.get('date')
1605 if date:
1605 if date:
1606 date = dateutil.parsedate(date)
1606 date = dateutil.parsedate(date)
1607 else:
1607 else:
1608 date = None
1608 date = None
1609 prec = parsenodeid(precursor)
1609 prec = parsenodeid(precursor)
1610 parents = None
1610 parents = None
1611 if opts['record_parents']:
1611 if opts['record_parents']:
1612 if prec not in repo.unfiltered():
1612 if prec not in repo.unfiltered():
1613 raise error.Abort('cannot use --record-parents on '
1613 raise error.Abort('cannot use --record-parents on '
1614 'unknown changesets')
1614 'unknown changesets')
1615 parents = repo.unfiltered()[prec].parents()
1615 parents = repo.unfiltered()[prec].parents()
1616 parents = tuple(p.node() for p in parents)
1616 parents = tuple(p.node() for p in parents)
1617 repo.obsstore.create(tr, prec, succs, opts['flags'],
1617 repo.obsstore.create(tr, prec, succs, opts['flags'],
1618 parents=parents, date=date,
1618 parents=parents, date=date,
1619 metadata=metadata, ui=ui)
1619 metadata=metadata, ui=ui)
1620 tr.close()
1620 tr.close()
1621 except ValueError as exc:
1621 except ValueError as exc:
1622 raise error.Abort(_('bad obsmarker input: %s') %
1622 raise error.Abort(_('bad obsmarker input: %s') %
1623 pycompat.bytestr(exc))
1623 pycompat.bytestr(exc))
1624 finally:
1624 finally:
1625 tr.release()
1625 tr.release()
1626 finally:
1626 finally:
1627 l.release()
1627 l.release()
1628 else:
1628 else:
1629 if opts['rev']:
1629 if opts['rev']:
1630 revs = scmutil.revrange(repo, opts['rev'])
1630 revs = scmutil.revrange(repo, opts['rev'])
1631 nodes = [repo[r].node() for r in revs]
1631 nodes = [repo[r].node() for r in revs]
1632 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1632 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1633 exclusive=opts['exclusive']))
1633 exclusive=opts['exclusive']))
1634 markers.sort(key=lambda x: x._data)
1634 markers.sort(key=lambda x: x._data)
1635 else:
1635 else:
1636 markers = obsutil.getmarkers(repo)
1636 markers = obsutil.getmarkers(repo)
1637
1637
1638 markerstoiter = markers
1638 markerstoiter = markers
1639 isrelevant = lambda m: True
1639 isrelevant = lambda m: True
1640 if opts.get('rev') and opts.get('index'):
1640 if opts.get('rev') and opts.get('index'):
1641 markerstoiter = obsutil.getmarkers(repo)
1641 markerstoiter = obsutil.getmarkers(repo)
1642 markerset = set(markers)
1642 markerset = set(markers)
1643 isrelevant = lambda m: m in markerset
1643 isrelevant = lambda m: m in markerset
1644
1644
1645 fm = ui.formatter('debugobsolete', opts)
1645 fm = ui.formatter('debugobsolete', opts)
1646 for i, m in enumerate(markerstoiter):
1646 for i, m in enumerate(markerstoiter):
1647 if not isrelevant(m):
1647 if not isrelevant(m):
1648 # marker can be irrelevant when we're iterating over a set
1648 # marker can be irrelevant when we're iterating over a set
1649 # of markers (markerstoiter) which is bigger than the set
1649 # of markers (markerstoiter) which is bigger than the set
1650 # of markers we want to display (markers)
1650 # of markers we want to display (markers)
1651 # this can happen if both --index and --rev options are
1651 # this can happen if both --index and --rev options are
1652 # provided and thus we need to iterate over all of the markers
1652 # provided and thus we need to iterate over all of the markers
1653 # to get the correct indices, but only display the ones that
1653 # to get the correct indices, but only display the ones that
1654 # are relevant to --rev value
1654 # are relevant to --rev value
1655 continue
1655 continue
1656 fm.startitem()
1656 fm.startitem()
1657 ind = i if opts.get('index') else None
1657 ind = i if opts.get('index') else None
1658 cmdutil.showmarker(fm, m, index=ind)
1658 cmdutil.showmarker(fm, m, index=ind)
1659 fm.end()
1659 fm.end()
1660
1660
1661 @command('debugpathcomplete',
1661 @command('debugpathcomplete',
1662 [('f', 'full', None, _('complete an entire path')),
1662 [('f', 'full', None, _('complete an entire path')),
1663 ('n', 'normal', None, _('show only normal files')),
1663 ('n', 'normal', None, _('show only normal files')),
1664 ('a', 'added', None, _('show only added files')),
1664 ('a', 'added', None, _('show only added files')),
1665 ('r', 'removed', None, _('show only removed files'))],
1665 ('r', 'removed', None, _('show only removed files'))],
1666 _('FILESPEC...'))
1666 _('FILESPEC...'))
1667 def debugpathcomplete(ui, repo, *specs, **opts):
1667 def debugpathcomplete(ui, repo, *specs, **opts):
1668 '''complete part or all of a tracked path
1668 '''complete part or all of a tracked path
1669
1669
1670 This command supports shells that offer path name completion. It
1670 This command supports shells that offer path name completion. It
1671 currently completes only files already known to the dirstate.
1671 currently completes only files already known to the dirstate.
1672
1672
1673 Completion extends only to the next path segment unless
1673 Completion extends only to the next path segment unless
1674 --full is specified, in which case entire paths are used.'''
1674 --full is specified, in which case entire paths are used.'''
1675
1675
1676 def complete(path, acceptable):
1676 def complete(path, acceptable):
1677 dirstate = repo.dirstate
1677 dirstate = repo.dirstate
1678 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1678 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1679 rootdir = repo.root + pycompat.ossep
1679 rootdir = repo.root + pycompat.ossep
1680 if spec != repo.root and not spec.startswith(rootdir):
1680 if spec != repo.root and not spec.startswith(rootdir):
1681 return [], []
1681 return [], []
1682 if os.path.isdir(spec):
1682 if os.path.isdir(spec):
1683 spec += '/'
1683 spec += '/'
1684 spec = spec[len(rootdir):]
1684 spec = spec[len(rootdir):]
1685 fixpaths = pycompat.ossep != '/'
1685 fixpaths = pycompat.ossep != '/'
1686 if fixpaths:
1686 if fixpaths:
1687 spec = spec.replace(pycompat.ossep, '/')
1687 spec = spec.replace(pycompat.ossep, '/')
1688 speclen = len(spec)
1688 speclen = len(spec)
1689 fullpaths = opts[r'full']
1689 fullpaths = opts[r'full']
1690 files, dirs = set(), set()
1690 files, dirs = set(), set()
1691 adddir, addfile = dirs.add, files.add
1691 adddir, addfile = dirs.add, files.add
1692 for f, st in dirstate.iteritems():
1692 for f, st in dirstate.iteritems():
1693 if f.startswith(spec) and st[0] in acceptable:
1693 if f.startswith(spec) and st[0] in acceptable:
1694 if fixpaths:
1694 if fixpaths:
1695 f = f.replace('/', pycompat.ossep)
1695 f = f.replace('/', pycompat.ossep)
1696 if fullpaths:
1696 if fullpaths:
1697 addfile(f)
1697 addfile(f)
1698 continue
1698 continue
1699 s = f.find(pycompat.ossep, speclen)
1699 s = f.find(pycompat.ossep, speclen)
1700 if s >= 0:
1700 if s >= 0:
1701 adddir(f[:s])
1701 adddir(f[:s])
1702 else:
1702 else:
1703 addfile(f)
1703 addfile(f)
1704 return files, dirs
1704 return files, dirs
1705
1705
1706 acceptable = ''
1706 acceptable = ''
1707 if opts[r'normal']:
1707 if opts[r'normal']:
1708 acceptable += 'nm'
1708 acceptable += 'nm'
1709 if opts[r'added']:
1709 if opts[r'added']:
1710 acceptable += 'a'
1710 acceptable += 'a'
1711 if opts[r'removed']:
1711 if opts[r'removed']:
1712 acceptable += 'r'
1712 acceptable += 'r'
1713 cwd = repo.getcwd()
1713 cwd = repo.getcwd()
1714 if not specs:
1714 if not specs:
1715 specs = ['.']
1715 specs = ['.']
1716
1716
1717 files, dirs = set(), set()
1717 files, dirs = set(), set()
1718 for spec in specs:
1718 for spec in specs:
1719 f, d = complete(spec, acceptable or 'nmar')
1719 f, d = complete(spec, acceptable or 'nmar')
1720 files.update(f)
1720 files.update(f)
1721 dirs.update(d)
1721 dirs.update(d)
1722 files.update(dirs)
1722 files.update(dirs)
1723 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1723 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1724 ui.write('\n')
1724 ui.write('\n')
1725
1725
1726 @command('debugpeer', [], _('PATH'), norepo=True)
1726 @command('debugpeer', [], _('PATH'), norepo=True)
1727 def debugpeer(ui, path):
1727 def debugpeer(ui, path):
1728 """establish a connection to a peer repository"""
1728 """establish a connection to a peer repository"""
1729 # Always enable peer request logging. Requires --debug to display
1729 # Always enable peer request logging. Requires --debug to display
1730 # though.
1730 # though.
1731 overrides = {
1731 overrides = {
1732 ('devel', 'debug.peer-request'): True,
1732 ('devel', 'debug.peer-request'): True,
1733 }
1733 }
1734
1734
1735 with ui.configoverride(overrides):
1735 with ui.configoverride(overrides):
1736 peer = hg.peer(ui, {}, path)
1736 peer = hg.peer(ui, {}, path)
1737
1737
1738 local = peer.local() is not None
1738 local = peer.local() is not None
1739 canpush = peer.canpush()
1739 canpush = peer.canpush()
1740
1740
1741 ui.write(_('url: %s\n') % peer.url())
1741 ui.write(_('url: %s\n') % peer.url())
1742 ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
1742 ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
1743 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
1743 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
1744
1744
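For orientation, the three ``ui.write`` calls above emit one line per attribute, so a run against a local repository would print something roughly like the following (the exact values depend on the peer):

    url: /path/to/repo
    local: yes
    pushable: yes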
1745 @command('debugpickmergetool',
1745 @command('debugpickmergetool',
1746 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1746 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1747 ('', 'changedelete', None, _('emulate merging change and delete')),
1747 ('', 'changedelete', None, _('emulate merging change and delete')),
1748 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1748 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1749 _('[PATTERN]...'),
1749 _('[PATTERN]...'),
1750 inferrepo=True)
1750 inferrepo=True)
1751 def debugpickmergetool(ui, repo, *pats, **opts):
1751 def debugpickmergetool(ui, repo, *pats, **opts):
1752 """examine which merge tool is chosen for specified file
1752 """examine which merge tool is chosen for specified file
1753
1753
1754 As described in :hg:`help merge-tools`, Mercurial examines
1754 As described in :hg:`help merge-tools`, Mercurial examines
1755 the configurations below, in this order, to decide which merge tool is
1755 the configurations below, in this order, to decide which merge tool is
1756 chosen for the specified file.
1756 chosen for the specified file.
1757
1757
1758 1. ``--tool`` option
1758 1. ``--tool`` option
1759 2. ``HGMERGE`` environment variable
1759 2. ``HGMERGE`` environment variable
1760 3. configurations in ``merge-patterns`` section
1760 3. configurations in ``merge-patterns`` section
1761 4. configuration of ``ui.merge``
1761 4. configuration of ``ui.merge``
1762 5. configurations in ``merge-tools`` section
1762 5. configurations in ``merge-tools`` section
1763 6. ``hgmerge`` tool (for historical reasons only)
1763 6. ``hgmerge`` tool (for historical reasons only)
1764 7. default tool for fallback (``:merge`` or ``:prompt``)
1764 7. default tool for fallback (``:merge`` or ``:prompt``)
1765
1765
1766 This command writes out examination result in the style below::
1766 This command writes out examination result in the style below::
1767
1767
1768 FILE = MERGETOOL
1768 FILE = MERGETOOL
1769
1769
1770 By default, all files known in the first parent context of the
1770 By default, all files known in the first parent context of the
1771 working directory are examined. Use file patterns and/or -I/-X
1771 working directory are examined. Use file patterns and/or -I/-X
1772 options to limit target files. -r/--rev is also useful to examine
1772 options to limit target files. -r/--rev is also useful to examine
1773 files in another context without actually updating to it.
1773 files in another context without actually updating to it.
1774
1774
1775 With --debug, this command also shows warning messages produced while
1775 With --debug, this command also shows warning messages produced while
1776 matching against ``merge-patterns`` and related configuration. It is
1776 matching against ``merge-patterns`` and related configuration. It is
1777 recommended to use this option with explicit file patterns and/or
1777 recommended to use this option with explicit file patterns and/or
1778 -I/-X options, because this option increases the amount of output per
1778 -I/-X options, because this option increases the amount of output per
1779 file according to the configuration in hgrc.
1779 file according to the configuration in hgrc.
1780
1780
1781 With -v/--verbose, this command first shows the configurations below
1781 With -v/--verbose, this command first shows the configurations below
1782 (only those that are actually specified).
1782 (only those that are actually specified).
1783
1783
1784 - ``--tool`` option
1784 - ``--tool`` option
1785 - ``HGMERGE`` environment variable
1785 - ``HGMERGE`` environment variable
1786 - configuration of ``ui.merge``
1786 - configuration of ``ui.merge``
1787
1787
1788 If the merge tool is chosen before matching against
1788 If the merge tool is chosen before matching against
1789 ``merge-patterns``, this command can't show any helpful
1789 ``merge-patterns``, this command can't show any helpful
1790 information, even with --debug. In such a case, the information
1790 information, even with --debug. In such a case, the information
1791 above is useful for understanding why a merge tool was chosen.
1791 above is useful for understanding why a merge tool was chosen.
1792 """
1792 """
1793 opts = pycompat.byteskwargs(opts)
1793 opts = pycompat.byteskwargs(opts)
1794 overrides = {}
1794 overrides = {}
1795 if opts['tool']:
1795 if opts['tool']:
1796 overrides[('ui', 'forcemerge')] = opts['tool']
1796 overrides[('ui', 'forcemerge')] = opts['tool']
1797 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1797 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1798
1798
1799 with ui.configoverride(overrides, 'debugmergepatterns'):
1799 with ui.configoverride(overrides, 'debugmergepatterns'):
1800 hgmerge = encoding.environ.get("HGMERGE")
1800 hgmerge = encoding.environ.get("HGMERGE")
1801 if hgmerge is not None:
1801 if hgmerge is not None:
1802 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1802 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1803 uimerge = ui.config("ui", "merge")
1803 uimerge = ui.config("ui", "merge")
1804 if uimerge:
1804 if uimerge:
1805 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1805 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1806
1806
1807 ctx = scmutil.revsingle(repo, opts.get('rev'))
1807 ctx = scmutil.revsingle(repo, opts.get('rev'))
1808 m = scmutil.match(ctx, pats, opts)
1808 m = scmutil.match(ctx, pats, opts)
1809 changedelete = opts['changedelete']
1809 changedelete = opts['changedelete']
1810 for path in ctx.walk(m):
1810 for path in ctx.walk(m):
1811 fctx = ctx[path]
1811 fctx = ctx[path]
1812 try:
1812 try:
1813 if not ui.debugflag:
1813 if not ui.debugflag:
1814 ui.pushbuffer(error=True)
1814 ui.pushbuffer(error=True)
1815 tool, toolpath = filemerge._picktool(repo, ui, path,
1815 tool, toolpath = filemerge._picktool(repo, ui, path,
1816 fctx.isbinary(),
1816 fctx.isbinary(),
1817 'l' in fctx.flags(),
1817 'l' in fctx.flags(),
1818 changedelete)
1818 changedelete)
1819 finally:
1819 finally:
1820 if not ui.debugflag:
1820 if not ui.debugflag:
1821 ui.popbuffer()
1821 ui.popbuffer()
1822 ui.write(('%s = %s\n') % (path, tool))
1822 ui.write(('%s = %s\n') % (path, tool))
1823
1823
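The numbered precedence list in the docstring above is essentially a first-match-wins lookup. The sketch below walks the same order with made-up configuration structures; it is only an illustration, not the real ``filemerge._picktool`` logic:

    def pick_tool(cli_tool, env, pattern_tools, config, path):
        # Illustrative first-match-wins walk over the precedence documented above.
        if cli_tool:                                 # 1. --tool
            return cli_tool
        if env.get('HGMERGE'):                       # 2. HGMERGE environment variable
            return env['HGMERGE']
        for pattern, tool in pattern_tools:          # 3. [merge-patterns] entries
            if path.endswith(pattern):
                return tool
        if config.get('ui.merge'):                   # 4. ui.merge
            return config['ui.merge']
        if config.get('merge-tools'):                # 5. first configured merge tool
            return config['merge-tools'][0]
        return ':merge'                              # 6./7. hgmerge / built-in fallback

    # A merge-patterns match wins over ui.merge, matching the documented order:
    print(pick_tool(None, {}, [('.bin', ':forcedump')],
                    {'ui.merge': 'vimdiff'}, 'data/blob.bin'))   # -> :forcedump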
1824 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1824 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1825 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1825 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1826 '''access the pushkey key/value protocol
1826 '''access the pushkey key/value protocol
1827
1827
1828 With two args, list the keys in the given namespace.
1828 With two args, list the keys in the given namespace.
1829
1829
1830 With five args, set a key to new if it is currently set to old.
1830 With five args, set a key to new if it is currently set to old.
1831 Reports success or failure.
1831 Reports success or failure.
1832 '''
1832 '''
1833
1833
1834 target = hg.peer(ui, {}, repopath)
1834 target = hg.peer(ui, {}, repopath)
1835 if keyinfo:
1835 if keyinfo:
1836 key, old, new = keyinfo
1836 key, old, new = keyinfo
1837 with target.commandexecutor() as e:
1837 with target.commandexecutor() as e:
1838 r = e.callcommand('pushkey', {
1838 r = e.callcommand('pushkey', {
1839 'namespace': namespace,
1839 'namespace': namespace,
1840 'key': key,
1840 'key': key,
1841 'old': old,
1841 'old': old,
1842 'new': new,
1842 'new': new,
1843 }).result()
1843 }).result()
1844
1844
1845 ui.status(pycompat.bytestr(r) + '\n')
1845 ui.status(pycompat.bytestr(r) + '\n')
1846 return not r
1846 return not r
1847 else:
1847 else:
1848 for k, v in sorted(target.listkeys(namespace).iteritems()):
1848 for k, v in sorted(target.listkeys(namespace).iteritems()):
1849 ui.write("%s\t%s\n" % (stringutil.escapestr(k),
1849 ui.write("%s\t%s\n" % (stringutil.escapestr(k),
1850 stringutil.escapestr(v)))
1850 stringutil.escapestr(v)))
1851
1851
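The five-argument form described in the docstring above is a compare-and-set: the key is updated only if its current value equals ``old``. A toy illustration of just that semantic, using a plain dict rather than the wire protocol (in this toy, an empty ``old`` matches a missing key):

    def pushkey(store, key, old, new):
        # Compare-and-set, mirroring the five-argument form described above.
        if store.get(key, '') != old:
            return False          # reported as failure
        store[key] = new
        return True

    bookmarks = {'stable': 'abc123'}
    print(pushkey(bookmarks, 'stable', 'abc123', 'def456'))  # True, value swapped
    print(pushkey(bookmarks, 'stable', 'abc123', '000000'))  # False, old no longer matches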
1852 @command('debugpvec', [], _('A B'))
1852 @command('debugpvec', [], _('A B'))
1853 def debugpvec(ui, repo, a, b=None):
1853 def debugpvec(ui, repo, a, b=None):
1854 ca = scmutil.revsingle(repo, a)
1854 ca = scmutil.revsingle(repo, a)
1855 cb = scmutil.revsingle(repo, b)
1855 cb = scmutil.revsingle(repo, b)
1856 pa = pvec.ctxpvec(ca)
1856 pa = pvec.ctxpvec(ca)
1857 pb = pvec.ctxpvec(cb)
1857 pb = pvec.ctxpvec(cb)
1858 if pa == pb:
1858 if pa == pb:
1859 rel = "="
1859 rel = "="
1860 elif pa > pb:
1860 elif pa > pb:
1861 rel = ">"
1861 rel = ">"
1862 elif pa < pb:
1862 elif pa < pb:
1863 rel = "<"
1863 rel = "<"
1864 elif pa | pb:
1864 elif pa | pb:
1865 rel = "|"
1865 rel = "|"
1866 ui.write(_("a: %s\n") % pa)
1866 ui.write(_("a: %s\n") % pa)
1867 ui.write(_("b: %s\n") % pb)
1867 ui.write(_("b: %s\n") % pb)
1868 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1868 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1869 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1869 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1870 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1870 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1871 pa.distance(pb), rel))
1871 pa.distance(pb), rel))
1872
1872
1873 @command('debugrebuilddirstate|debugrebuildstate',
1873 @command('debugrebuilddirstate|debugrebuildstate',
1874 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1874 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1875 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1875 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1876 'the working copy parent')),
1876 'the working copy parent')),
1877 ],
1877 ],
1878 _('[-r REV]'))
1878 _('[-r REV]'))
1879 def debugrebuilddirstate(ui, repo, rev, **opts):
1879 def debugrebuilddirstate(ui, repo, rev, **opts):
1880 """rebuild the dirstate as it would look like for the given revision
1880 """rebuild the dirstate as it would look like for the given revision
1881
1881
1882 If no revision is specified, the first current parent will be used.
1882 If no revision is specified, the first current parent will be used.
1883
1883
1884 The dirstate will be set to the files of the given revision.
1884 The dirstate will be set to the files of the given revision.
1885 The actual working directory content or existing dirstate
1885 The actual working directory content or existing dirstate
1886 information such as adds or removes is not considered.
1886 information such as adds or removes is not considered.
1887
1887
1888 ``minimal`` will only rebuild the dirstate status for files that claim to be
1888 ``minimal`` will only rebuild the dirstate status for files that claim to be
1889 tracked but are not in the parent manifest, or that exist in the parent
1889 tracked but are not in the parent manifest, or that exist in the parent
1890 manifest but are not in the dirstate. It will not change adds, removes, or
1890 manifest but are not in the dirstate. It will not change adds, removes, or
1891 modified files that are in the working copy parent.
1891 modified files that are in the working copy parent.
1892
1892
1893 One use of this command is to make the next :hg:`status` invocation
1893 One use of this command is to make the next :hg:`status` invocation
1894 check the actual file content.
1894 check the actual file content.
1895 """
1895 """
1896 ctx = scmutil.revsingle(repo, rev)
1896 ctx = scmutil.revsingle(repo, rev)
1897 with repo.wlock():
1897 with repo.wlock():
1898 dirstate = repo.dirstate
1898 dirstate = repo.dirstate
1899 changedfiles = None
1899 changedfiles = None
1900 # See command doc for what minimal does.
1900 # See command doc for what minimal does.
1901 if opts.get(r'minimal'):
1901 if opts.get(r'minimal'):
1902 manifestfiles = set(ctx.manifest().keys())
1902 manifestfiles = set(ctx.manifest().keys())
1903 dirstatefiles = set(dirstate)
1903 dirstatefiles = set(dirstate)
1904 manifestonly = manifestfiles - dirstatefiles
1904 manifestonly = manifestfiles - dirstatefiles
1905 dsonly = dirstatefiles - manifestfiles
1905 dsonly = dirstatefiles - manifestfiles
1906 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1906 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1907 changedfiles = manifestonly | dsnotadded
1907 changedfiles = manifestonly | dsnotadded
1908
1908
1909 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1909 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1910
1910
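The ``--minimal`` branch above reduces to two set differences plus a union. A tiny worked example of that arithmetic, with invented file names:

    manifestfiles = {'a.txt', 'b.txt', 'c.txt'}
    dirstatefiles = {'b.txt', 'c.txt', 'new.txt', 'stale.txt'}
    dirstate_states = {'b.txt': 'n', 'c.txt': 'n', 'new.txt': 'a', 'stale.txt': 'n'}

    manifestonly = manifestfiles - dirstatefiles                      # {'a.txt'}
    dsonly = dirstatefiles - manifestfiles                            # {'new.txt', 'stale.txt'}
    dsnotadded = set(f for f in dsonly if dirstate_states[f] != 'a')  # {'stale.txt'}
    changedfiles = manifestonly | dsnotadded

    print(sorted(changedfiles))   # ['a.txt', 'stale.txt']

Only those two files get their dirstate entries rebuilt; adds, removes, and modifications of files present in the working copy parent are left alone, as the docstring promises.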
1911 @command('debugrebuildfncache', [], '')
1911 @command('debugrebuildfncache', [], '')
1912 def debugrebuildfncache(ui, repo):
1912 def debugrebuildfncache(ui, repo):
1913 """rebuild the fncache file"""
1913 """rebuild the fncache file"""
1914 repair.rebuildfncache(ui, repo)
1914 repair.rebuildfncache(ui, repo)
1915
1915
1916 @command('debugrename',
1916 @command('debugrename',
1917 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1917 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1918 _('[-r REV] FILE'))
1918 _('[-r REV] FILE'))
1919 def debugrename(ui, repo, file1, *pats, **opts):
1919 def debugrename(ui, repo, file1, *pats, **opts):
1920 """dump rename information"""
1920 """dump rename information"""
1921
1921
1922 opts = pycompat.byteskwargs(opts)
1922 opts = pycompat.byteskwargs(opts)
1923 ctx = scmutil.revsingle(repo, opts.get('rev'))
1923 ctx = scmutil.revsingle(repo, opts.get('rev'))
1924 m = scmutil.match(ctx, (file1,) + pats, opts)
1924 m = scmutil.match(ctx, (file1,) + pats, opts)
1925 for abs in ctx.walk(m):
1925 for abs in ctx.walk(m):
1926 fctx = ctx[abs]
1926 fctx = ctx[abs]
1927 o = fctx.filelog().renamed(fctx.filenode())
1927 o = fctx.filelog().renamed(fctx.filenode())
1928 rel = m.rel(abs)
1928 rel = m.rel(abs)
1929 if o:
1929 if o:
1930 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1930 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1931 else:
1931 else:
1932 ui.write(_("%s not renamed\n") % rel)
1932 ui.write(_("%s not renamed\n") % rel)
1933
1933
1934 @command('debugrevlog', cmdutil.debugrevlogopts +
1934 @command('debugrevlog', cmdutil.debugrevlogopts +
1935 [('d', 'dump', False, _('dump index data'))],
1935 [('d', 'dump', False, _('dump index data'))],
1936 _('-c|-m|FILE'),
1936 _('-c|-m|FILE'),
1937 optionalrepo=True)
1937 optionalrepo=True)
1938 def debugrevlog(ui, repo, file_=None, **opts):
1938 def debugrevlog(ui, repo, file_=None, **opts):
1939 """show data and statistics about a revlog"""
1939 """show data and statistics about a revlog"""
1940 opts = pycompat.byteskwargs(opts)
1940 opts = pycompat.byteskwargs(opts)
1941 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1941 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1942
1942
1943 if opts.get("dump"):
1943 if opts.get("dump"):
1944 numrevs = len(r)
1944 numrevs = len(r)
1945 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1945 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1946 " rawsize totalsize compression heads chainlen\n"))
1946 " rawsize totalsize compression heads chainlen\n"))
1947 ts = 0
1947 ts = 0
1948 heads = set()
1948 heads = set()
1949
1949
1950 for rev in xrange(numrevs):
1950 for rev in xrange(numrevs):
1951 dbase = r.deltaparent(rev)
1951 dbase = r.deltaparent(rev)
1952 if dbase == -1:
1952 if dbase == -1:
1953 dbase = rev
1953 dbase = rev
1954 cbase = r.chainbase(rev)
1954 cbase = r.chainbase(rev)
1955 clen = r.chainlen(rev)
1955 clen = r.chainlen(rev)
1956 p1, p2 = r.parentrevs(rev)
1956 p1, p2 = r.parentrevs(rev)
1957 rs = r.rawsize(rev)
1957 rs = r.rawsize(rev)
1958 ts = ts + rs
1958 ts = ts + rs
1959 heads -= set(r.parentrevs(rev))
1959 heads -= set(r.parentrevs(rev))
1960 heads.add(rev)
1960 heads.add(rev)
1961 try:
1961 try:
1962 compression = ts / r.end(rev)
1962 compression = ts / r.end(rev)
1963 except ZeroDivisionError:
1963 except ZeroDivisionError:
1964 compression = 0
1964 compression = 0
1965 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1965 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1966 "%11d %5d %8d\n" %
1966 "%11d %5d %8d\n" %
1967 (rev, p1, p2, r.start(rev), r.end(rev),
1967 (rev, p1, p2, r.start(rev), r.end(rev),
1968 r.start(dbase), r.start(cbase),
1968 r.start(dbase), r.start(cbase),
1969 r.start(p1), r.start(p2),
1969 r.start(p1), r.start(p2),
1970 rs, ts, compression, len(heads), clen))
1970 rs, ts, compression, len(heads), clen))
1971 return 0
1971 return 0
1972
1972
1973 v = r.version
1973 v = r.version
1974 format = v & 0xFFFF
1974 format = v & 0xFFFF
1975 flags = []
1975 flags = []
1976 gdelta = False
1976 gdelta = False
1977 if v & revlog.FLAG_INLINE_DATA:
1977 if v & revlog.FLAG_INLINE_DATA:
1978 flags.append('inline')
1978 flags.append('inline')
1979 if v & revlog.FLAG_GENERALDELTA:
1979 if v & revlog.FLAG_GENERALDELTA:
1980 gdelta = True
1980 gdelta = True
1981 flags.append('generaldelta')
1981 flags.append('generaldelta')
1982 if not flags:
1982 if not flags:
1983 flags = ['(none)']
1983 flags = ['(none)']
1984
1984
1985 nummerges = 0
1985 nummerges = 0
1986 numfull = 0
1986 numfull = 0
1987 numprev = 0
1987 numprev = 0
1988 nump1 = 0
1988 nump1 = 0
1989 nump2 = 0
1989 nump2 = 0
1990 numother = 0
1990 numother = 0
1991 nump1prev = 0
1991 nump1prev = 0
1992 nump2prev = 0
1992 nump2prev = 0
1993 chainlengths = []
1993 chainlengths = []
1994 chainbases = []
1994 chainbases = []
1995 chainspans = []
1995 chainspans = []
1996
1996
1997 datasize = [None, 0, 0]
1997 datasize = [None, 0, 0]
1998 fullsize = [None, 0, 0]
1998 fullsize = [None, 0, 0]
1999 deltasize = [None, 0, 0]
1999 deltasize = [None, 0, 0]
2000 chunktypecounts = {}
2000 chunktypecounts = {}
2001 chunktypesizes = {}
2001 chunktypesizes = {}
2002
2002
2003 def addsize(size, l):
2003 def addsize(size, l):
2004 if l[0] is None or size < l[0]:
2004 if l[0] is None or size < l[0]:
2005 l[0] = size
2005 l[0] = size
2006 if size > l[1]:
2006 if size > l[1]:
2007 l[1] = size
2007 l[1] = size
2008 l[2] += size
2008 l[2] += size
2009
2009
2010 numrevs = len(r)
2010 numrevs = len(r)
2011 for rev in xrange(numrevs):
2011 for rev in xrange(numrevs):
2012 p1, p2 = r.parentrevs(rev)
2012 p1, p2 = r.parentrevs(rev)
2013 delta = r.deltaparent(rev)
2013 delta = r.deltaparent(rev)
2014 if format > 0:
2014 if format > 0:
2015 addsize(r.rawsize(rev), datasize)
2015 addsize(r.rawsize(rev), datasize)
2016 if p2 != nullrev:
2016 if p2 != nullrev:
2017 nummerges += 1
2017 nummerges += 1
2018 size = r.length(rev)
2018 size = r.length(rev)
2019 if delta == nullrev:
2019 if delta == nullrev:
2020 chainlengths.append(0)
2020 chainlengths.append(0)
2021 chainbases.append(r.start(rev))
2021 chainbases.append(r.start(rev))
2022 chainspans.append(size)
2022 chainspans.append(size)
2023 numfull += 1
2023 numfull += 1
2024 addsize(size, fullsize)
2024 addsize(size, fullsize)
2025 else:
2025 else:
2026 chainlengths.append(chainlengths[delta] + 1)
2026 chainlengths.append(chainlengths[delta] + 1)
2027 baseaddr = chainbases[delta]
2027 baseaddr = chainbases[delta]
2028 revaddr = r.start(rev)
2028 revaddr = r.start(rev)
2029 chainbases.append(baseaddr)
2029 chainbases.append(baseaddr)
2030 chainspans.append((revaddr - baseaddr) + size)
2030 chainspans.append((revaddr - baseaddr) + size)
2031 addsize(size, deltasize)
2031 addsize(size, deltasize)
2032 if delta == rev - 1:
2032 if delta == rev - 1:
2033 numprev += 1
2033 numprev += 1
2034 if delta == p1:
2034 if delta == p1:
2035 nump1prev += 1
2035 nump1prev += 1
2036 elif delta == p2:
2036 elif delta == p2:
2037 nump2prev += 1
2037 nump2prev += 1
2038 elif delta == p1:
2038 elif delta == p1:
2039 nump1 += 1
2039 nump1 += 1
2040 elif delta == p2:
2040 elif delta == p2:
2041 nump2 += 1
2041 nump2 += 1
2042 elif delta != nullrev:
2042 elif delta != nullrev:
2043 numother += 1
2043 numother += 1
2044
2044
2045 # Obtain data on the raw chunks in the revlog.
2045 # Obtain data on the raw chunks in the revlog.
2046 segment = r._getsegmentforrevs(rev, rev)[1]
2046 segment = r._getsegmentforrevs(rev, rev)[1]
2047 if segment:
2047 if segment:
2048 chunktype = bytes(segment[0:1])
2048 chunktype = bytes(segment[0:1])
2049 else:
2049 else:
2050 chunktype = 'empty'
2050 chunktype = 'empty'
2051
2051
2052 if chunktype not in chunktypecounts:
2052 if chunktype not in chunktypecounts:
2053 chunktypecounts[chunktype] = 0
2053 chunktypecounts[chunktype] = 0
2054 chunktypesizes[chunktype] = 0
2054 chunktypesizes[chunktype] = 0
2055
2055
2056 chunktypecounts[chunktype] += 1
2056 chunktypecounts[chunktype] += 1
2057 chunktypesizes[chunktype] += size
2057 chunktypesizes[chunktype] += size
2058
2058
2059 # Adjust size min value for empty cases
2059 # Adjust size min value for empty cases
2060 for size in (datasize, fullsize, deltasize):
2060 for size in (datasize, fullsize, deltasize):
2061 if size[0] is None:
2061 if size[0] is None:
2062 size[0] = 0
2062 size[0] = 0
2063
2063
2064 numdeltas = numrevs - numfull
2064 numdeltas = numrevs - numfull
2065 numoprev = numprev - nump1prev - nump2prev
2065 numoprev = numprev - nump1prev - nump2prev
2066 totalrawsize = datasize[2]
2066 totalrawsize = datasize[2]
2067 datasize[2] /= numrevs
2067 datasize[2] /= numrevs
2068 fulltotal = fullsize[2]
2068 fulltotal = fullsize[2]
2069 fullsize[2] /= numfull
2069 fullsize[2] /= numfull
2070 deltatotal = deltasize[2]
2070 deltatotal = deltasize[2]
2071 if numrevs - numfull > 0:
2071 if numrevs - numfull > 0:
2072 deltasize[2] /= numrevs - numfull
2072 deltasize[2] /= numrevs - numfull
2073 totalsize = fulltotal + deltatotal
2073 totalsize = fulltotal + deltatotal
2074 avgchainlen = sum(chainlengths) / numrevs
2074 avgchainlen = sum(chainlengths) / numrevs
2075 maxchainlen = max(chainlengths)
2075 maxchainlen = max(chainlengths)
2076 maxchainspan = max(chainspans)
2076 maxchainspan = max(chainspans)
2077 compratio = 1
2077 compratio = 1
2078 if totalsize:
2078 if totalsize:
2079 compratio = totalrawsize / totalsize
2079 compratio = totalrawsize / totalsize
2080
2080
2081 basedfmtstr = '%%%dd\n'
2081 basedfmtstr = '%%%dd\n'
2082 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2082 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2083
2083
2084 def dfmtstr(max):
2084 def dfmtstr(max):
2085 return basedfmtstr % len(str(max))
2085 return basedfmtstr % len(str(max))
2086 def pcfmtstr(max, padding=0):
2086 def pcfmtstr(max, padding=0):
2087 return basepcfmtstr % (len(str(max)), ' ' * padding)
2087 return basepcfmtstr % (len(str(max)), ' ' * padding)
2088
2088
2089 def pcfmt(value, total):
2089 def pcfmt(value, total):
2090 if total:
2090 if total:
2091 return (value, 100 * float(value) / total)
2091 return (value, 100 * float(value) / total)
2092 else:
2092 else:
2093 return value, 100.0
2093 return value, 100.0
2094
2094
2095 ui.write(('format : %d\n') % format)
2095 ui.write(('format : %d\n') % format)
2096 ui.write(('flags : %s\n') % ', '.join(flags))
2096 ui.write(('flags : %s\n') % ', '.join(flags))
2097
2097
2098 ui.write('\n')
2098 ui.write('\n')
2099 fmt = pcfmtstr(totalsize)
2099 fmt = pcfmtstr(totalsize)
2100 fmt2 = dfmtstr(totalsize)
2100 fmt2 = dfmtstr(totalsize)
2101 ui.write(('revisions : ') + fmt2 % numrevs)
2101 ui.write(('revisions : ') + fmt2 % numrevs)
2102 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2102 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2103 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2103 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2104 ui.write(('revisions : ') + fmt2 % numrevs)
2104 ui.write(('revisions : ') + fmt2 % numrevs)
2105 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
2105 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
2106 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2106 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2107 ui.write(('revision size : ') + fmt2 % totalsize)
2107 ui.write(('revision size : ') + fmt2 % totalsize)
2108 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
2108 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
2109 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2109 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2110
2110
2111 def fmtchunktype(chunktype):
2111 def fmtchunktype(chunktype):
2112 if chunktype == 'empty':
2112 if chunktype == 'empty':
2113 return ' %s : ' % chunktype
2113 return ' %s : ' % chunktype
2114 elif chunktype in pycompat.bytestr(string.ascii_letters):
2114 elif chunktype in pycompat.bytestr(string.ascii_letters):
2115 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2115 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2116 else:
2116 else:
2117 return ' 0x%s : ' % hex(chunktype)
2117 return ' 0x%s : ' % hex(chunktype)
2118
2118
2119 ui.write('\n')
2119 ui.write('\n')
2120 ui.write(('chunks : ') + fmt2 % numrevs)
2120 ui.write(('chunks : ') + fmt2 % numrevs)
2121 for chunktype in sorted(chunktypecounts):
2121 for chunktype in sorted(chunktypecounts):
2122 ui.write(fmtchunktype(chunktype))
2122 ui.write(fmtchunktype(chunktype))
2123 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2123 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2124 ui.write(('chunks size : ') + fmt2 % totalsize)
2124 ui.write(('chunks size : ') + fmt2 % totalsize)
2125 for chunktype in sorted(chunktypecounts):
2125 for chunktype in sorted(chunktypecounts):
2126 ui.write(fmtchunktype(chunktype))
2126 ui.write(fmtchunktype(chunktype))
2127 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2127 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2128
2128
2129 ui.write('\n')
2129 ui.write('\n')
2130 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2130 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2131 ui.write(('avg chain length : ') + fmt % avgchainlen)
2131 ui.write(('avg chain length : ') + fmt % avgchainlen)
2132 ui.write(('max chain length : ') + fmt % maxchainlen)
2132 ui.write(('max chain length : ') + fmt % maxchainlen)
2133 ui.write(('max chain reach : ') + fmt % maxchainspan)
2133 ui.write(('max chain reach : ') + fmt % maxchainspan)
2134 ui.write(('compression ratio : ') + fmt % compratio)
2134 ui.write(('compression ratio : ') + fmt % compratio)
2135
2135
2136 if format > 0:
2136 if format > 0:
2137 ui.write('\n')
2137 ui.write('\n')
2138 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2138 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2139 % tuple(datasize))
2139 % tuple(datasize))
2140 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2140 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2141 % tuple(fullsize))
2141 % tuple(fullsize))
2142 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2142 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2143 % tuple(deltasize))
2143 % tuple(deltasize))
2144
2144
2145 if numdeltas > 0:
2145 if numdeltas > 0:
2146 ui.write('\n')
2146 ui.write('\n')
2147 fmt = pcfmtstr(numdeltas)
2147 fmt = pcfmtstr(numdeltas)
2148 fmt2 = pcfmtstr(numdeltas, 4)
2148 fmt2 = pcfmtstr(numdeltas, 4)
2149 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2149 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2150 if numprev > 0:
2150 if numprev > 0:
2151 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2151 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2152 numprev))
2152 numprev))
2153 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2153 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2154 numprev))
2154 numprev))
2155 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2155 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2156 numprev))
2156 numprev))
2157 if gdelta:
2157 if gdelta:
2158 ui.write(('deltas against p1 : ')
2158 ui.write(('deltas against p1 : ')
2159 + fmt % pcfmt(nump1, numdeltas))
2159 + fmt % pcfmt(nump1, numdeltas))
2160 ui.write(('deltas against p2 : ')
2160 ui.write(('deltas against p2 : ')
2161 + fmt % pcfmt(nump2, numdeltas))
2161 + fmt % pcfmt(nump2, numdeltas))
2162 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2162 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2163 numdeltas))
2163 numdeltas))
2164
2164
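Two of the figures printed above are plain ratios: the compression ratio is the total uncompressed size divided by the total stored size, and every ``pcfmt`` pair is a value together with its percentage of a total. A worked example with invented sizes:

    def pcfmt(value, total):
        # value plus its share of total, as in the statistics output above
        if total:
            return (value, 100 * float(value) / total)
        return value, 100.0

    totalrawsize = 1200   # sum of uncompressed revision sizes (made-up bytes)
    totalsize = 400       # bytes actually stored: full snapshots + deltas
    compratio = totalrawsize / float(totalsize)

    print('compression ratio : %d' % compratio)        # 3
    print('deltas : %d (%5.2f%%)' % pcfmt(75, 100))     # 75 (75.00%)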
2165 @command('debugrevspec',
2165 @command('debugrevspec',
2166 [('', 'optimize', None,
2166 [('', 'optimize', None,
2167 _('print parsed tree after optimizing (DEPRECATED)')),
2167 _('print parsed tree after optimizing (DEPRECATED)')),
2168 ('', 'show-revs', True, _('print list of result revisions (default)')),
2168 ('', 'show-revs', True, _('print list of result revisions (default)')),
2169 ('s', 'show-set', None, _('print internal representation of result set')),
2169 ('s', 'show-set', None, _('print internal representation of result set')),
2170 ('p', 'show-stage', [],
2170 ('p', 'show-stage', [],
2171 _('print parsed tree at the given stage'), _('NAME')),
2171 _('print parsed tree at the given stage'), _('NAME')),
2172 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2172 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2173 ('', 'verify-optimized', False, _('verify optimized result')),
2173 ('', 'verify-optimized', False, _('verify optimized result')),
2174 ],
2174 ],
2175 ('REVSPEC'))
2175 ('REVSPEC'))
2176 def debugrevspec(ui, repo, expr, **opts):
2176 def debugrevspec(ui, repo, expr, **opts):
2177 """parse and apply a revision specification
2177 """parse and apply a revision specification
2178
2178
2179 Use -p/--show-stage option to print the parsed tree at the given stages.
2179 Use -p/--show-stage option to print the parsed tree at the given stages.
2180 Use -p all to print the tree at every stage.
2180 Use -p all to print the tree at every stage.
2181
2181
2182 Use --no-show-revs option with -s or -p to print only the set
2182 Use --no-show-revs option with -s or -p to print only the set
2183 representation or the parsed tree respectively.
2183 representation or the parsed tree respectively.
2184
2184
2185 Use --verify-optimized to compare the optimized result with the unoptimized
2185 Use --verify-optimized to compare the optimized result with the unoptimized
2186 one. Returns 1 if the optimized result differs.
2186 one. Returns 1 if the optimized result differs.
2187 """
2187 """
2188 opts = pycompat.byteskwargs(opts)
2188 opts = pycompat.byteskwargs(opts)
2189 aliases = ui.configitems('revsetalias')
2189 aliases = ui.configitems('revsetalias')
2190 stages = [
2190 stages = [
2191 ('parsed', lambda tree: tree),
2191 ('parsed', lambda tree: tree),
2192 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2192 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2193 ui.warn)),
2193 ui.warn)),
2194 ('concatenated', revsetlang.foldconcat),
2194 ('concatenated', revsetlang.foldconcat),
2195 ('analyzed', revsetlang.analyze),
2195 ('analyzed', revsetlang.analyze),
2196 ('optimized', revsetlang.optimize),
2196 ('optimized', revsetlang.optimize),
2197 ]
2197 ]
2198 if opts['no_optimized']:
2198 if opts['no_optimized']:
2199 stages = stages[:-1]
2199 stages = stages[:-1]
2200 if opts['verify_optimized'] and opts['no_optimized']:
2200 if opts['verify_optimized'] and opts['no_optimized']:
2201 raise error.Abort(_('cannot use --verify-optimized with '
2201 raise error.Abort(_('cannot use --verify-optimized with '
2202 '--no-optimized'))
2202 '--no-optimized'))
2203 stagenames = set(n for n, f in stages)
2203 stagenames = set(n for n, f in stages)
2204
2204
2205 showalways = set()
2205 showalways = set()
2206 showchanged = set()
2206 showchanged = set()
2207 if ui.verbose and not opts['show_stage']:
2207 if ui.verbose and not opts['show_stage']:
2208 # show parsed tree by --verbose (deprecated)
2208 # show parsed tree by --verbose (deprecated)
2209 showalways.add('parsed')
2209 showalways.add('parsed')
2210 showchanged.update(['expanded', 'concatenated'])
2210 showchanged.update(['expanded', 'concatenated'])
2211 if opts['optimize']:
2211 if opts['optimize']:
2212 showalways.add('optimized')
2212 showalways.add('optimized')
2213 if opts['show_stage'] and opts['optimize']:
2213 if opts['show_stage'] and opts['optimize']:
2214 raise error.Abort(_('cannot use --optimize with --show-stage'))
2214 raise error.Abort(_('cannot use --optimize with --show-stage'))
2215 if opts['show_stage'] == ['all']:
2215 if opts['show_stage'] == ['all']:
2216 showalways.update(stagenames)
2216 showalways.update(stagenames)
2217 else:
2217 else:
2218 for n in opts['show_stage']:
2218 for n in opts['show_stage']:
2219 if n not in stagenames:
2219 if n not in stagenames:
2220 raise error.Abort(_('invalid stage name: %s') % n)
2220 raise error.Abort(_('invalid stage name: %s') % n)
2221 showalways.update(opts['show_stage'])
2221 showalways.update(opts['show_stage'])
2222
2222
2223 treebystage = {}
2223 treebystage = {}
2224 printedtree = None
2224 printedtree = None
2225 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
2225 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
2226 for n, f in stages:
2226 for n, f in stages:
2227 treebystage[n] = tree = f(tree)
2227 treebystage[n] = tree = f(tree)
2228 if n in showalways or (n in showchanged and tree != printedtree):
2228 if n in showalways or (n in showchanged and tree != printedtree):
2229 if opts['show_stage'] or n != 'parsed':
2229 if opts['show_stage'] or n != 'parsed':
2230 ui.write(("* %s:\n") % n)
2230 ui.write(("* %s:\n") % n)
2231 ui.write(revsetlang.prettyformat(tree), "\n")
2231 ui.write(revsetlang.prettyformat(tree), "\n")
2232 printedtree = tree
2232 printedtree = tree
2233
2233
2234 if opts['verify_optimized']:
2234 if opts['verify_optimized']:
2235 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2235 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2236 brevs = revset.makematcher(treebystage['optimized'])(repo)
2236 brevs = revset.makematcher(treebystage['optimized'])(repo)
2237 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2237 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2238 ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
2238 ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
2239 ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
2239 ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
2240 arevs = list(arevs)
2240 arevs = list(arevs)
2241 brevs = list(brevs)
2241 brevs = list(brevs)
2242 if arevs == brevs:
2242 if arevs == brevs:
2243 return 0
2243 return 0
2244 ui.write(('--- analyzed\n'), label='diff.file_a')
2244 ui.write(('--- analyzed\n'), label='diff.file_a')
2245 ui.write(('+++ optimized\n'), label='diff.file_b')
2245 ui.write(('+++ optimized\n'), label='diff.file_b')
2246 sm = difflib.SequenceMatcher(None, arevs, brevs)
2246 sm = difflib.SequenceMatcher(None, arevs, brevs)
2247 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2247 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2248 if tag in ('delete', 'replace'):
2248 if tag in ('delete', 'replace'):
2249 for c in arevs[alo:ahi]:
2249 for c in arevs[alo:ahi]:
2250 ui.write('-%s\n' % c, label='diff.deleted')
2250 ui.write('-%s\n' % c, label='diff.deleted')
2251 if tag in ('insert', 'replace'):
2251 if tag in ('insert', 'replace'):
2252 for c in brevs[blo:bhi]:
2252 for c in brevs[blo:bhi]:
2253 ui.write('+%s\n' % c, label='diff.inserted')
2253 ui.write('+%s\n' % c, label='diff.inserted')
2254 if tag == 'equal':
2254 if tag == 'equal':
2255 for c in arevs[alo:ahi]:
2255 for c in arevs[alo:ahi]:
2256 ui.write(' %s\n' % c)
2256 ui.write(' %s\n' % c)
2257 return 1
2257 return 1
2258
2258
2259 func = revset.makematcher(tree)
2259 func = revset.makematcher(tree)
2260 revs = func(repo)
2260 revs = func(repo)
2261 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2261 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2262 ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
2262 ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
2263 if not opts['show_revs']:
2263 if not opts['show_revs']:
2264 return
2264 return
2265 for c in revs:
2265 for c in revs:
2266 ui.write("%d\n" % c)
2266 ui.write("%d\n" % c)
2267
2267
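The stages listed in the docstring above can also be driven by hand with the same helpers the command uses. A rough sketch, assuming an already-opened ``repo`` object and skipping alias expansion and error handling:

    from mercurial import revset, revsetlang

    def evaluate(repo, spec):
        tree = revsetlang.parse(spec)       # 'parsed'
        tree = revsetlang.analyze(tree)     # 'analyzed'
        tree = revsetlang.optimize(tree)    # 'optimized'
        return list(revset.makematcher(tree)(repo))

    # e.g. evaluate(repo, 'heads(.)') would return the matching revision numbers.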
2268 @command('debugserve', [
2268 @command('debugserve', [
2269 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2269 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2270 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2270 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2271 ('', 'logiofile', '', _('file to log server I/O to')),
2271 ('', 'logiofile', '', _('file to log server I/O to')),
2272 ], '')
2272 ], '')
2273 def debugserve(ui, repo, **opts):
2273 def debugserve(ui, repo, **opts):
2274 """run a server with advanced settings
2274 """run a server with advanced settings
2275
2275
2276 This command is similar to :hg:`serve`. It exists partially as a
2276 This command is similar to :hg:`serve`. It exists partially as a
2277 workaround for the fact that ``hg serve --stdio`` must have specific
2277 workaround for the fact that ``hg serve --stdio`` must have specific
2278 arguments for security reasons.
2278 arguments for security reasons.
2279 """
2279 """
2280 opts = pycompat.byteskwargs(opts)
2280 opts = pycompat.byteskwargs(opts)
2281
2281
2282 if not opts['sshstdio']:
2282 if not opts['sshstdio']:
2283 raise error.Abort(_('only --sshstdio is currently supported'))
2283 raise error.Abort(_('only --sshstdio is currently supported'))
2284
2284
2285 logfh = None
2285 logfh = None
2286
2286
2287 if opts['logiofd'] and opts['logiofile']:
2287 if opts['logiofd'] and opts['logiofile']:
2288 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2288 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2289
2289
2290 if opts['logiofd']:
2290 if opts['logiofd']:
2291 # Line buffered because output is line based.
2291 # Line buffered because output is line based.
2292 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2292 try:
2293 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2294 except OSError as e:
2295 if e.errno != errno.ESPIPE:
2296 raise
2297 # can't seek a pipe, so `ab` mode fails on py3
2298 logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
2293 elif opts['logiofile']:
2299 elif opts['logiofile']:
2294 logfh = open(opts['logiofile'], 'ab', 1)
2300 logfh = open(opts['logiofile'], 'ab', 1)
2295
2301
2296 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2302 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2297 s.serve_forever()
2303 s.serve_forever()
2298
2304
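The try/except added in this change exists because append mode implies a seek to end-of-file and pipes cannot seek, so ``os.fdopen(fd, 'ab')`` could raise ``ESPIPE`` on the Python 3 versions current at the time. The same pattern can be reproduced in isolation with a plain pipe (a minimal sketch, not part of the command):

    import errno
    import os

    read_fd, write_fd = os.pipe()
    try:
        logfh = os.fdopen(write_fd, 'ab', 1)   # may raise: a pipe cannot seek to EOF
    except OSError as e:
        if e.errno != errno.ESPIPE:
            raise
        logfh = os.fdopen(write_fd, 'wb', 1)   # plain write mode works for pipes

    logfh.write(b'log line\n')
    logfh.close()
    os.close(read_fd)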
2299 @command('debugsetparents', [], _('REV1 [REV2]'))
2305 @command('debugsetparents', [], _('REV1 [REV2]'))
2300 def debugsetparents(ui, repo, rev1, rev2=None):
2306 def debugsetparents(ui, repo, rev1, rev2=None):
2301 """manually set the parents of the current working directory
2307 """manually set the parents of the current working directory
2302
2308
2303 This is useful for writing repository conversion tools, but should
2309 This is useful for writing repository conversion tools, but should
2304 be used with care. For example, neither the working directory nor the
2310 be used with care. For example, neither the working directory nor the
2305 dirstate is updated, so file status may be incorrect after running this
2311 dirstate is updated, so file status may be incorrect after running this
2306 command.
2312 command.
2307
2313
2308 Returns 0 on success.
2314 Returns 0 on success.
2309 """
2315 """
2310
2316
2311 node1 = scmutil.revsingle(repo, rev1).node()
2317 node1 = scmutil.revsingle(repo, rev1).node()
2312 node2 = scmutil.revsingle(repo, rev2, 'null').node()
2318 node2 = scmutil.revsingle(repo, rev2, 'null').node()
2313
2319
2314 with repo.wlock():
2320 with repo.wlock():
2315 repo.setparents(node1, node2)
2321 repo.setparents(node1, node2)
2316
2322
2317 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2323 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2318 def debugssl(ui, repo, source=None, **opts):
2324 def debugssl(ui, repo, source=None, **opts):
2319 '''test a secure connection to a server
2325 '''test a secure connection to a server
2320
2326
2321 This builds the certificate chain for the server on Windows, installing the
2327 This builds the certificate chain for the server on Windows, installing the
2322 missing intermediates and trusted root via Windows Update if necessary. It
2328 missing intermediates and trusted root via Windows Update if necessary. It
2323 does nothing on other platforms.
2329 does nothing on other platforms.
2324
2330
2325 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2331 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2326 that server is used. See :hg:`help urls` for more information.
2332 that server is used. See :hg:`help urls` for more information.
2327
2333
2328 If the update succeeds, retry the original operation. Otherwise, the cause
2334 If the update succeeds, retry the original operation. Otherwise, the cause
2329 of the SSL error is likely another issue.
2335 of the SSL error is likely another issue.
2330 '''
2336 '''
2331 if not pycompat.iswindows:
2337 if not pycompat.iswindows:
2332 raise error.Abort(_('certificate chain building is only possible on '
2338 raise error.Abort(_('certificate chain building is only possible on '
2333 'Windows'))
2339 'Windows'))
2334
2340
2335 if not source:
2341 if not source:
2336 if not repo:
2342 if not repo:
2337 raise error.Abort(_("there is no Mercurial repository here, and no "
2343 raise error.Abort(_("there is no Mercurial repository here, and no "
2338 "server specified"))
2344 "server specified"))
2339 source = "default"
2345 source = "default"
2340
2346
2341 source, branches = hg.parseurl(ui.expandpath(source))
2347 source, branches = hg.parseurl(ui.expandpath(source))
2342 url = util.url(source)
2348 url = util.url(source)
2343 addr = None
2349 addr = None
2344
2350
2345 defaultport = {'https': 443, 'ssh': 22}
2351 defaultport = {'https': 443, 'ssh': 22}
2346 if url.scheme in defaultport:
2352 if url.scheme in defaultport:
2347 try:
2353 try:
2348 addr = (url.host, int(url.port or defaultport[url.scheme]))
2354 addr = (url.host, int(url.port or defaultport[url.scheme]))
2349 except ValueError:
2355 except ValueError:
2350 raise error.Abort(_("malformed port number in URL"))
2356 raise error.Abort(_("malformed port number in URL"))
2351 else:
2357 else:
2352 raise error.Abort(_("only https and ssh connections are supported"))
2358 raise error.Abort(_("only https and ssh connections are supported"))
2353
2359
2354 from . import win32
2360 from . import win32
2355
2361
2356 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2362 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2357 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2363 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2358
2364
2359 try:
2365 try:
2360 s.connect(addr)
2366 s.connect(addr)
2361 cert = s.getpeercert(True)
2367 cert = s.getpeercert(True)
2362
2368
2363 ui.status(_('checking the certificate chain for %s\n') % url.host)
2369 ui.status(_('checking the certificate chain for %s\n') % url.host)
2364
2370
2365 complete = win32.checkcertificatechain(cert, build=False)
2371 complete = win32.checkcertificatechain(cert, build=False)
2366
2372
2367 if not complete:
2373 if not complete:
2368 ui.status(_('certificate chain is incomplete, updating... '))
2374 ui.status(_('certificate chain is incomplete, updating... '))
2369
2375
2370 if not win32.checkcertificatechain(cert):
2376 if not win32.checkcertificatechain(cert):
2371 ui.status(_('failed.\n'))
2377 ui.status(_('failed.\n'))
2372 else:
2378 else:
2373 ui.status(_('done.\n'))
2379 ui.status(_('done.\n'))
2374 else:
2380 else:
2375 ui.status(_('full certificate chain is available\n'))
2381 ui.status(_('full certificate chain is available\n'))
2376 finally:
2382 finally:
2377 s.close()
2383 s.close()
2378
2384
2379 @command('debugsub',
2385 @command('debugsub',
2380 [('r', 'rev', '',
2386 [('r', 'rev', '',
2381 _('revision to check'), _('REV'))],
2387 _('revision to check'), _('REV'))],
2382 _('[-r REV] [REV]'))
2388 _('[-r REV] [REV]'))
2383 def debugsub(ui, repo, rev=None):
2389 def debugsub(ui, repo, rev=None):
2384 ctx = scmutil.revsingle(repo, rev, None)
2390 ctx = scmutil.revsingle(repo, rev, None)
2385 for k, v in sorted(ctx.substate.items()):
2391 for k, v in sorted(ctx.substate.items()):
2386 ui.write(('path %s\n') % k)
2392 ui.write(('path %s\n') % k)
2387 ui.write((' source %s\n') % v[0])
2393 ui.write((' source %s\n') % v[0])
2388 ui.write((' revision %s\n') % v[1])
2394 ui.write((' revision %s\n') % v[1])
2389
2395
2390 @command('debugsuccessorssets',
2396 @command('debugsuccessorssets',
2391 [('', 'closest', False, _('return closest successors sets only'))],
2397 [('', 'closest', False, _('return closest successors sets only'))],
2392 _('[REV]'))
2398 _('[REV]'))
2393 def debugsuccessorssets(ui, repo, *revs, **opts):
2399 def debugsuccessorssets(ui, repo, *revs, **opts):
2394 """show set of successors for revision
2400 """show set of successors for revision
2395
2401
2396 A successors set of changeset A is a consistent group of revisions that
2402 A successors set of changeset A is a consistent group of revisions that
2397 succeed A. It contains non-obsolete changesets only, unless the closest
2403 succeed A. It contains non-obsolete changesets only, unless the closest
2398 successors sets are requested (--closest).
2404 successors sets are requested (--closest).
2399
2405
2400 In most cases a changeset A has a single successors set containing a single
2406 In most cases a changeset A has a single successors set containing a single
2401 successor (changeset A replaced by A').
2407 successor (changeset A replaced by A').
2402
2408
2403 A changeset that is made obsolete with no successors is called "pruned".
2409 A changeset that is made obsolete with no successors is called "pruned".
2404 Such changesets have no successors sets at all.
2410 Such changesets have no successors sets at all.
2405
2411
2406 A changeset that has been "split" will have a successors set containing
2412 A changeset that has been "split" will have a successors set containing
2407 more than one successor.
2413 more than one successor.
2408
2414
2409 A changeset that has been rewritten in multiple different ways is called
2415 A changeset that has been rewritten in multiple different ways is called
2410 "divergent". Such changesets have multiple successor sets (each of which
2416 "divergent". Such changesets have multiple successor sets (each of which
2411 may also be split, i.e. have multiple successors).
2417 may also be split, i.e. have multiple successors).
2412
2418
2413 Results are displayed as follows::
2419 Results are displayed as follows::
2414
2420
2415 <rev1>
2421 <rev1>
2416 <successors-1A>
2422 <successors-1A>
2417 <rev2>
2423 <rev2>
2418 <successors-2A>
2424 <successors-2A>
2419 <successors-2B1> <successors-2B2> <successors-2B3>
2425 <successors-2B1> <successors-2B2> <successors-2B3>
2420
2426
2421 Here rev2 has two possible (i.e. divergent) successors sets. The first
2427 Here rev2 has two possible (i.e. divergent) successors sets. The first
2422 holds one element, whereas the second holds three (i.e. the changeset has
2428 holds one element, whereas the second holds three (i.e. the changeset has
2423 been split).
2429 been split).
2424 """
2430 """
2425 # passed to successorssets caching computation from one call to another
2431 # passed to successorssets caching computation from one call to another
2426 cache = {}
2432 cache = {}
2427 ctx2str = bytes
2433 ctx2str = bytes
2428 node2str = short
2434 node2str = short
2429 for rev in scmutil.revrange(repo, revs):
2435 for rev in scmutil.revrange(repo, revs):
2430 ctx = repo[rev]
2436 ctx = repo[rev]
2431 ui.write('%s\n'% ctx2str(ctx))
2437 ui.write('%s\n'% ctx2str(ctx))
2432 for succsset in obsutil.successorssets(repo, ctx.node(),
2438 for succsset in obsutil.successorssets(repo, ctx.node(),
2433 closest=opts[r'closest'],
2439 closest=opts[r'closest'],
2434 cache=cache):
2440 cache=cache):
2435 if succsset:
2441 if succsset:
2436 ui.write(' ')
2442 ui.write(' ')
2437 ui.write(node2str(succsset[0]))
2443 ui.write(node2str(succsset[0]))
2438 for node in succsset[1:]:
2444 for node in succsset[1:]:
2439 ui.write(' ')
2445 ui.write(' ')
2440 ui.write(node2str(node))
2446 ui.write(node2str(node))
2441 ui.write('\n')
2447 ui.write('\n')
2442
2448
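The nested loop above is the whole algorithm: one line per revision, then one indented line per successors set. A rough equivalent built on the same helper, assuming a loaded ``repo`` and a changectx ``ctx`` (output formatting simplified):

    from mercurial import obsutil
    from mercurial.node import short

    def successorlines(repo, ctx, closest=False):
        # One text line per successors set of ctx, as in the command above.
        cache = {}   # cache passed through, as in the command above
        for succset in obsutil.successorssets(repo, ctx.node(),
                                              closest=closest, cache=cache):
            yield b' '.join(short(n) for n in succset)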
2443 @command('debugtemplate',
2449 @command('debugtemplate',
2444 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2450 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2445 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2451 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2446 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2452 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2447 optionalrepo=True)
2453 optionalrepo=True)
2448 def debugtemplate(ui, repo, tmpl, **opts):
2454 def debugtemplate(ui, repo, tmpl, **opts):
2449 """parse and apply a template
2455 """parse and apply a template
2450
2456
2451 If -r/--rev is given, the template is processed as a log template and
2457 If -r/--rev is given, the template is processed as a log template and
2452 applied to the given changesets. Otherwise, it is processed as a generic
2458 applied to the given changesets. Otherwise, it is processed as a generic
2453 template.
2459 template.
2454
2460
2455 Use --verbose to print the parsed tree.
2461 Use --verbose to print the parsed tree.
2456 """
2462 """
2457 revs = None
2463 revs = None
2458 if opts[r'rev']:
2464 if opts[r'rev']:
2459 if repo is None:
2465 if repo is None:
2460 raise error.RepoError(_('there is no Mercurial repository here '
2466 raise error.RepoError(_('there is no Mercurial repository here '
2461 '(.hg not found)'))
2467 '(.hg not found)'))
2462 revs = scmutil.revrange(repo, opts[r'rev'])
2468 revs = scmutil.revrange(repo, opts[r'rev'])
2463
2469
2464 props = {}
2470 props = {}
2465 for d in opts[r'define']:
2471 for d in opts[r'define']:
2466 try:
2472 try:
2467 k, v = (e.strip() for e in d.split('=', 1))
2473 k, v = (e.strip() for e in d.split('=', 1))
2468 if not k or k == 'ui':
2474 if not k or k == 'ui':
2469 raise ValueError
2475 raise ValueError
2470 props[k] = v
2476 props[k] = v
2471 except ValueError:
2477 except ValueError:
2472 raise error.Abort(_('malformed keyword definition: %s') % d)
2478 raise error.Abort(_('malformed keyword definition: %s') % d)
2473
2479
2474 if ui.verbose:
2480 if ui.verbose:
2475 aliases = ui.configitems('templatealias')
2481 aliases = ui.configitems('templatealias')
2476 tree = templater.parse(tmpl)
2482 tree = templater.parse(tmpl)
2477 ui.note(templater.prettyformat(tree), '\n')
2483 ui.note(templater.prettyformat(tree), '\n')
2478 newtree = templater.expandaliases(tree, aliases)
2484 newtree = templater.expandaliases(tree, aliases)
2479 if newtree != tree:
2485 if newtree != tree:
2480 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2486 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2481
2487
2482 if revs is None:
2488 if revs is None:
2483 tres = formatter.templateresources(ui, repo)
2489 tres = formatter.templateresources(ui, repo)
2484 t = formatter.maketemplater(ui, tmpl, resources=tres)
2490 t = formatter.maketemplater(ui, tmpl, resources=tres)
2485 ui.write(t.renderdefault(props))
2491 ui.write(t.renderdefault(props))
2486 else:
2492 else:
2487 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2493 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2488 for r in revs:
2494 for r in revs:
2489 displayer.show(repo[r], **pycompat.strkwargs(props))
2495 displayer.show(repo[r], **pycompat.strkwargs(props))
2490 displayer.close()
2496 displayer.close()
2491
2497
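For the no-revision case, the command boils down to building a templater and rendering the -D definitions against it. A condensed sketch using the same formatter helpers, assuming a ``ui`` object and no repository (``props`` plays the role of the -D key/value pairs):

    from mercurial import formatter

    def rendertemplate(ui, tmpl, props):
        tres = formatter.templateresources(ui, None)   # no repo needed here
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        return t.renderdefault(props)

    # e.g. rendertemplate(ui, b'{greeting}, {name}\n',
    #                     {b'greeting': b'hello', b'name': b'world'})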
2492 @command('debuguigetpass', [
2498 @command('debuguigetpass', [
2493 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2499 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2494 ], _('[-p TEXT]'), norepo=True)
2500 ], _('[-p TEXT]'), norepo=True)
2495 def debuguigetpass(ui, prompt=''):
2501 def debuguigetpass(ui, prompt=''):
2496 """show prompt to type password"""
2502 """show prompt to type password"""
2497 r = ui.getpass(prompt)
2503 r = ui.getpass(prompt)
2498 ui.write(('response: %s\n') % r)
2504 ui.write(('response: %s\n') % r)
2499
2505
2500 @command('debuguiprompt', [
2506 @command('debuguiprompt', [
2501 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2507 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2502 ], _('[-p TEXT]'), norepo=True)
2508 ], _('[-p TEXT]'), norepo=True)
2503 def debuguiprompt(ui, prompt=''):
2509 def debuguiprompt(ui, prompt=''):
2504 """show plain prompt"""
2510 """show plain prompt"""
2505 r = ui.prompt(prompt)
2511 r = ui.prompt(prompt)
2506 ui.write(('response: %s\n') % r)
2512 ui.write(('response: %s\n') % r)
2507
2513
2508 @command('debugupdatecaches', [])
2514 @command('debugupdatecaches', [])
2509 def debugupdatecaches(ui, repo, *pats, **opts):
2515 def debugupdatecaches(ui, repo, *pats, **opts):
2510 """warm all known caches in the repository"""
2516 """warm all known caches in the repository"""
2511 with repo.wlock(), repo.lock():
2517 with repo.wlock(), repo.lock():
2512 repo.updatecaches(full=True)
2518 repo.updatecaches(full=True)
2513
2519
2514 @command('debugupgraderepo', [
2520 @command('debugupgraderepo', [
2515 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2521 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2516 ('', 'run', False, _('performs an upgrade')),
2522 ('', 'run', False, _('performs an upgrade')),
2517 ])
2523 ])
2518 def debugupgraderepo(ui, repo, run=False, optimize=None):
2524 def debugupgraderepo(ui, repo, run=False, optimize=None):
2519 """upgrade a repository to use different features
2525 """upgrade a repository to use different features
2520
2526
2521 If no arguments are specified, the repository is evaluated for upgrade
2527 If no arguments are specified, the repository is evaluated for upgrade
2522 and a list of problems and potential optimizations is printed.
2528 and a list of problems and potential optimizations is printed.
2523
2529
2524 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2530 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2525 can be influenced via additional arguments. More details will be provided
2531 can be influenced via additional arguments. More details will be provided
2526 by the command output when run without ``--run``.
2532 by the command output when run without ``--run``.
2527
2533
2528 During the upgrade, the repository will be locked and no writes will be
2534 During the upgrade, the repository will be locked and no writes will be
2529 allowed.
2535 allowed.
2530
2536
2531 At the end of the upgrade, the repository may not be readable while new
2537 At the end of the upgrade, the repository may not be readable while new
2532 repository data is swapped in. This window will be as long as it takes to
2538 repository data is swapped in. This window will be as long as it takes to
2533 rename some directories inside the ``.hg`` directory. On most machines, this
2539 rename some directories inside the ``.hg`` directory. On most machines, this
2534 should complete almost instantaneously and the chances of a consumer being
2540 should complete almost instantaneously and the chances of a consumer being
2535 unable to access the repository should be low.
2541 unable to access the repository should be low.
2536 """
2542 """
    return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)

@command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
         inferrepo=True)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
    items = list(repo[None].walk(m))
    if not items:
        return
    f = lambda fn: fn
    if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
        f = lambda fn: util.normpath(fn)
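    # One line is printed per matched file: a literal "f", the repository-
    # relative path, the path relative to the current directory, and the
    # marker "exact" when the file was named explicitly rather than matched
    # by a pattern.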
    fmt = 'f %%-%ds %%-%ds %%s' % (
        max([len(abs) for abs in items]),
        max([len(m.rel(abs)) for abs in items]))
    for abs in items:
        line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
        ui.write("%s\n" % line.rstrip())

@command('debugwhyunstable', [], _('REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
        dnodes = ''
        if entry.get('divergentnodes'):
            dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
                              for ctx in entry['divergentnodes']) + ' '
        ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
                                    entry['reason'], entry['node']))

@command('debugwireargs',
    [('', 'three', '', 'three'),
     ('', 'four', '', 'four'),
     ('', 'five', '', 'five'),
     ] + cmdutil.remoteopts,
    _('REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    for opt in cmdutil.remoteopts:
        del opts[opt[1]]
    args = {}
    for k, v in opts.iteritems():
        if v:
            args[k] = v
    args = pycompat.strkwargs(args)
    # run twice to check that we don't mess up the stream for the next command
    res1 = repo.debugwireargs(*vals, **args)
    res2 = repo.debugwireargs(*vals, **args)
    ui.write("%s\n" % res1)
    if res1 != res2:
        ui.warn("%s\n" % res2)

def _parsewirelangblocks(fh):
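    # Illustrative example of the block structure parsed here: the input
    #
    #   command listkeys
    #       namespace bookmarks
    #
    # is parsed into a single ('command listkeys', [...]) pair whose payload
    # list holds the indented argument line, with leading whitespace
    # preserved and trailing whitespace stripped.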
    activeaction = None
    blocklines = []

    for line in fh:
        line = line.rstrip()
        if not line:
            continue

        if line.startswith(b'#'):
            continue

        if not line.startswith(' '):
            # New block. Flush previous one.
            if activeaction:
                yield activeaction, blocklines

            activeaction = line
            blocklines = []
            continue

        # Else we start with an indent.

        if not activeaction:
            raise error.Abort(_('indented line outside of block'))

        blocklines.append(line)

    # Flush last block.
    if activeaction:
        yield activeaction, blocklines

@command('debugwireproto',
    [
        ('', 'localssh', False, _('start an SSH server for this repo')),
        ('', 'peer', '', _('construct a specific version of the peer')),
        ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
        ('', 'nologhandshake', False,
         _('do not log I/O related to the peer handshake')),
    ] + cmdutil.remoteopts,
    _('[PATH]'),
    optionalrepo=True)
def debugwireproto(ui, repo, path=None, **opts):
    """send wire protocol commands to a server

    This command can be used to issue wire protocol commands to remote
    peers and to debug the raw data being exchanged.

    ``--localssh`` will start an SSH server against the current repository
    and connect to that. By default, the connection will perform a handshake
    and establish an appropriate peer instance.

    ``--peer`` can be used to bypass the handshake protocol and construct a
    peer instance using the specified class type. Valid values are ``raw``,
    ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
    raw data payloads and don't support higher-level command actions.

    ``--noreadstderr`` can be used to disable automatic reading from stderr
    of the peer (for SSH connections only). Disabling automatic reading of
    stderr is useful for making output more deterministic.

    Commands are issued via a mini language which is specified via stdin.
    The language consists of individual actions to perform. An action is
    defined by a block. A block is defined as a line with no leading
    space followed by 0 or more lines with leading space. Blocks are
    effectively a high-level command with additional metadata.

    Lines beginning with ``#`` are ignored.

    The following sections denote available actions.

    raw
    ---

    Send raw data to the server.

    The block payload contains the raw data to send as one atomic send
    operation. The data may not actually be delivered in a single system
    call: it depends on the abilities of the transport being used.

    Each line in the block is de-indented and concatenated. Then, that
    value is evaluated as a Python b'' literal. This allows the use of
    backslash escaping, etc.

    raw+
    ----

    Behaves like ``raw`` except flushes output afterwards.

    command <X>
    -----------

    Send a request to run a named command, whose name follows the ``command``
    string.

    Arguments to the command are defined as lines in this block. The format of
    each line is ``<key> <value>``. e.g.::

        command listkeys
            namespace bookmarks

    If the value begins with ``eval:``, it will be interpreted as a Python
    literal expression. Otherwise values are interpreted as Python b'' literals.
    This allows sending complex types and encoding special byte sequences via
    backslash escaping.

    The following arguments have special meaning:

    ``PUSHFILE``
        When defined, the *push* mechanism of the peer will be used instead
        of the static request-response mechanism and the content of the
        file specified in the value of this argument will be sent as the
        command payload.

        This can be used to submit a local bundle file to the remote.

    batchbegin
    ----------

    Instruct the peer to begin a batched send.

    All ``command`` blocks are queued for execution until the next
    ``batchsubmit`` block.

    batchsubmit
    -----------

    Submit previously queued ``command`` blocks as a batch request.

    This action MUST be paired with a ``batchbegin`` action.

    httprequest <method> <path>
    ---------------------------

    (HTTP peer only)

    Send an HTTP request to the peer.

    The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.

    Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
    headers to add to the request. e.g. ``Accept: foo``.

    The following arguments are special:

    ``BODYFILE``
        The content of the file defined as the value to this argument will be
        transferred verbatim as the HTTP request body.

    ``frame <type> <flags> <payload>``
        Send a unified protocol frame as part of the request body.

        All frames will be collected and sent as the body to the HTTP
        request.

    close
    -----

    Close the connection to the server.

    flush
    -----

    Flush data written to the server.

    readavailable
    -------------

    Close the write end of the connection and read all available data from
    the server.

    If the connection to the server encompasses multiple pipes, we poll both
    pipes and read available data.

    readline
    --------

    Read a line of output from the server. If there are multiple output
    pipes, reads only the main pipe.

    ereadline
    ---------

    Like ``readline``, but read from the stderr pipe, if available.

    read <X>
    --------

    ``read()`` N bytes from the server's main output pipe.

    eread <X>
    ---------

    ``read()`` N bytes from the server's stderr pipe, if available.

    Specifying Unified Frame-Based Protocol Frames
    ----------------------------------------------

    It is possible to emit *Unified Frame-Based Protocol* frames by using
    special syntax.

    A frame is composed as a type, flags, and payload. These can be parsed
    from a string of the form:

        <request-id> <stream-id> <stream-flags> <type> <flags> <payload>

    ``request-id`` and ``stream-id`` are integers defining the request and
    stream identifiers.

    ``type`` can be an integer value for the frame type or the string name
    of the type. The strings are defined in ``wireprotoframing.py``. e.g.
    ``command-name``.

    ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
    components. Each component (and there can be just one) can be an integer
    or a flag name for stream flags or frame flags, respectively. Values are
    resolved to integers and then bitwise OR'd together.

    ``payload`` represents the raw frame payload. If it begins with
    ``cbor:``, the following string is evaluated as Python code and the
    resulting object is fed into a CBOR encoder. Otherwise it is interpreted
    as a Python byte string literal.
    """
    opts = pycompat.byteskwargs(opts)

    if opts['localssh'] and not repo:
        raise error.Abort(_('--localssh requires a repository'))

    if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
        raise error.Abort(_('invalid value for --peer'),
                          hint=_('valid values are "raw", "http2", "ssh1", '
                                 'and "ssh2"'))

    if path and opts['localssh']:
        raise error.Abort(_('cannot specify --localssh with an explicit '
                            'path'))

    if ui.interactive():
        ui.write(_('(waiting for commands on stdin)\n'))

    blocks = list(_parsewirelangblocks(ui.fin))

    proc = None
    stdin = None
    stdout = None
    stderr = None
    opener = None

    if opts['localssh']:
        # We start the SSH server in its own process so there is process
        # separation. This prevents a whole class of potential bugs around
        # shared state from interfering with server operation.
        args = procutil.hgcmd() + [
            '-R', repo.root,
            'debugserve', '--sshstdio',
        ]
        proc = subprocess.Popen(args, stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                                bufsize=0)

        stdin = proc.stdin
        stdout = proc.stdout
        stderr = proc.stderr

        # We turn the pipes into observers so we can log I/O.
        if ui.verbose or opts['peer'] == 'raw':
            stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
                                               logdata=True)
            stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
                                                logdata=True)
            stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
                                                logdata=True)

        # --localssh also implies the peer connection settings.

        url = 'ssh://localserver'
        autoreadstderr = not opts['noreadstderr']

        if opts['peer'] == 'ssh1':
            ui.write(_('creating ssh peer for wire protocol version 1\n'))
            peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
                                     None, autoreadstderr=autoreadstderr)
        elif opts['peer'] == 'ssh2':
            ui.write(_('creating ssh peer for wire protocol version 2\n'))
            peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
                                     None, autoreadstderr=autoreadstderr)
        elif opts['peer'] == 'raw':
            ui.write(_('using raw connection to peer\n'))
            peer = None
        else:
            ui.write(_('creating ssh peer from handshake results\n'))
            peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
                                    autoreadstderr=autoreadstderr)

    elif path:
        # We bypass hg.peer() so we can proxy the sockets.
        # TODO consider not doing this because we skip
        # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
        u = util.url(path)
        if u.scheme != 'http':
            raise error.Abort(_('only http:// paths are currently supported'))

        url, authinfo = u.authinfo()
        openerargs = {
            r'useragent': b'Mercurial debugwireproto',
        }

        # Turn pipes/sockets into observers so we can log I/O.
        if ui.verbose:
            openerargs.update({
                r'loggingfh': ui,
                r'loggingname': b's',
                r'loggingopts': {
                    r'logdata': True,
                    r'logdataapis': False,
                },
            })

        if ui.debugflag:
            openerargs[r'loggingopts'][r'logdataapis'] = True

        # Don't send default headers when in raw mode. This allows us to
        # bypass most of the behavior of our URL handling code so we can
        # have near complete control over what's sent on the wire.
        if opts['peer'] == 'raw':
            openerargs[r'sendaccept'] = False

        opener = urlmod.opener(ui, authinfo, **openerargs)

        if opts['peer'] == 'http2':
            ui.write(_('creating http peer for wire protocol version 2\n'))
            # We go through makepeer() because we need an API descriptor for
            # the peer instance to be useful.
            with ui.configoverride({
                ('experimental', 'httppeer.advertise-v2'): True}):
                if opts['nologhandshake']:
                    ui.pushbuffer()

                peer = httppeer.makepeer(ui, path, opener=opener)

                if opts['nologhandshake']:
                    ui.popbuffer()

            if not isinstance(peer, httppeer.httpv2peer):
                raise error.Abort(_('could not instantiate HTTP peer for '
                                    'wire protocol version 2'),
                                  hint=_('the server may not have the feature '
                                         'enabled or is not allowing this '
                                         'client version'))

        elif opts['peer'] == 'raw':
            ui.write(_('using raw connection to peer\n'))
            peer = None
        elif opts['peer']:
            raise error.Abort(_('--peer %s not supported with HTTP peers') %
                              opts['peer'])
        else:
            peer = httppeer.makepeer(ui, path, opener=opener)

        # We /could/ populate stdin/stdout with sock.makefile()...
    else:
        raise error.Abort(_('unsupported connection configuration'))

    batchedcommands = None

    # Now perform actions based on the parsed wire language instructions.
    for action, lines in blocks:
        if action in ('raw', 'raw+'):
            if not stdin:
                raise error.Abort(_('cannot call raw/raw+ on this peer'))

            # Concatenate the data together.
            data = ''.join(l.lstrip() for l in lines)
            data = stringutil.unescapestr(data)
            stdin.write(data)

            if action == 'raw+':
                stdin.flush()
        elif action == 'flush':
            if not stdin:
                raise error.Abort(_('cannot call flush on this peer'))
            stdin.flush()
        elif action.startswith('command'):
            if not peer:
                raise error.Abort(_('cannot send commands unless peer instance '
                                    'is available'))

            command = action.split(' ', 1)[1]

            args = {}
            for line in lines:
                # We need to allow empty values.
                fields = line.lstrip().split(' ', 1)
                if len(fields) == 1:
                    key = fields[0]
                    value = ''
                else:
                    key, value = fields

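                # Values prefixed with "eval:" are parsed as Python literals
                # (for instance ``key eval:True`` or ``key eval:[b'one']``),
                # which allows non-string argument types; anything else is
                # treated as a bytes literal after backslash unescaping.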
                if value.startswith('eval:'):
                    value = stringutil.evalpythonliteral(value[5:])
                else:
                    value = stringutil.unescapestr(value)

                args[key] = value

            if batchedcommands is not None:
                batchedcommands.append((command, args))
                continue

            ui.status(_('sending %s command\n') % command)

            if 'PUSHFILE' in args:
                with open(args['PUSHFILE'], r'rb') as fh:
                    del args['PUSHFILE']
                    res, output = peer._callpush(command, fh,
                                                 **pycompat.strkwargs(args))
                    ui.status(_('result: %s\n') % stringutil.escapestr(res))
                    ui.status(_('remote output: %s\n') %
                              stringutil.escapestr(output))
            else:
                with peer.commandexecutor() as e:
                    res = e.callcommand(command, args).result()

                if isinstance(res, wireprotov2peer.commandresponse):
                    val = list(res.cborobjects())
                    ui.status(_('response: %s\n') %
                              stringutil.pprint(val, bprefix=True))

                else:
                    ui.status(_('response: %s\n') %
                              stringutil.pprint(res, bprefix=True))

        elif action == 'batchbegin':
            if batchedcommands is not None:
                raise error.Abort(_('nested batchbegin not allowed'))

            batchedcommands = []
        elif action == 'batchsubmit':
            # There is a batching API we could go through. But it would be
            # difficult to normalize requests into function calls. It is easier
            # to bypass this layer and normalize to commands + args.
            ui.status(_('sending batch with %d sub-commands\n') %
                      len(batchedcommands))
            for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
                ui.status(_('response #%d: %s\n') %
                          (i, stringutil.escapestr(chunk)))

            batchedcommands = None

        elif action.startswith('httprequest '):
            if not opener:
                raise error.Abort(_('cannot use httprequest without an HTTP '
                                    'peer'))

            request = action.split(' ', 2)
            if len(request) != 3:
                raise error.Abort(_('invalid httprequest: expected format is '
                                    '"httprequest <method> <path>"'))

            method, httppath = request[1:]
            headers = {}
            body = None
            frames = []
            for line in lines:
                line = line.lstrip()
                m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
                if m:
                    headers[m.group(1)] = m.group(2)
                    continue

                if line.startswith(b'BODYFILE '):
                    with open(line.split(b' ', 1)[1], 'rb') as fh:
                        body = fh.read()
                elif line.startswith(b'frame '):
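                    # The rest of the line uses the human-readable frame
                    # syntax documented in the docstring above:
                    # <request-id> <stream-id> <stream-flags> <type> <flags>
                    # <payload>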
                    frame = wireprotoframing.makeframefromhumanstring(
                        line[len(b'frame '):])

                    frames.append(frame)
                else:
                    raise error.Abort(_('unknown argument to httprequest: %s') %
                                      line)

            url = path + httppath

            if frames:
                body = b''.join(bytes(f) for f in frames)

            req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)

            # urllib.Request insists on using has_data() as a proxy for
            # determining the request method. Override that to use our
            # explicitly requested method.
            req.get_method = lambda: method

            try:
                res = opener.open(req)
                body = res.read()
            except util.urlerr.urlerror as e:
                e.read()
                continue

            if res.headers.get('Content-Type') == 'application/mercurial-cbor':
                ui.write(_('cbor> %s\n') %
                         stringutil.pprint(cbor.loads(body), bprefix=True))

        elif action == 'close':
            peer.close()
        elif action == 'readavailable':
            if not stdout or not stderr:
                raise error.Abort(_('readavailable not available on this peer'))

            stdin.close()
            stdout.read()
            stderr.read()

        elif action == 'readline':
            if not stdout:
                raise error.Abort(_('readline not available on this peer'))
            stdout.readline()
        elif action == 'ereadline':
            if not stderr:
                raise error.Abort(_('ereadline not available on this peer'))
            stderr.readline()
        elif action.startswith('read '):
            count = int(action.split(' ', 1)[1])
            if not stdout:
                raise error.Abort(_('read not available on this peer'))
            stdout.read(count)
        elif action.startswith('eread '):
            count = int(action.split(' ', 1)[1])
            if not stderr:
                raise error.Abort(_('eread not available on this peer'))
            stderr.read(count)
        else:
            raise error.Abort(_('unknown action: %s') % action)

    if batchedcommands is not None:
        raise error.Abort(_('unclosed "batchbegin" request'))

    if peer:
        peer.close()

    if proc:
        proc.kill()