debugbuilddag: use progress helper...
Martin von Zweigbergk
r38394:fce1c174 default
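This revision replaces the direct ui.progress() calls in debugbuilddag with the progress helper returned by ui.makeprogress(). A minimal sketch of the pattern the change adopts, assuming ui, _ and total are in scope as in the function below; items and process() are illustrative placeholders, not code from this file:

progress = ui.makeprogress(_('building'), unit=_('revisions'), total=total)
try:
    for i, item in enumerate(items):    # placeholder iterable being processed
        progress.update(i)              # move the progress bar to position i
        process(item)                   # hypothetical per-item work
finally:
    progress.complete()                 # always clear the progress bar, even on error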
@@ -1,3152 +1,3154 @@
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import difflib
12 import difflib
13 import errno
13 import errno
14 import operator
14 import operator
15 import os
15 import os
16 import random
16 import random
17 import re
17 import re
18 import socket
18 import socket
19 import ssl
19 import ssl
20 import stat
20 import stat
21 import string
21 import string
22 import subprocess
22 import subprocess
23 import sys
23 import sys
24 import time
24 import time
25
25
26 from .i18n import _
26 from .i18n import _
27 from .node import (
27 from .node import (
28 bin,
28 bin,
29 hex,
29 hex,
30 nullhex,
30 nullhex,
31 nullid,
31 nullid,
32 nullrev,
32 nullrev,
33 short,
33 short,
34 )
34 )
35 from .thirdparty import (
35 from .thirdparty import (
36 cbor,
36 cbor,
37 )
37 )
38 from . import (
38 from . import (
39 bundle2,
39 bundle2,
40 changegroup,
40 changegroup,
41 cmdutil,
41 cmdutil,
42 color,
42 color,
43 context,
43 context,
44 dagparser,
44 dagparser,
45 dagutil,
45 dagutil,
46 encoding,
46 encoding,
47 error,
47 error,
48 exchange,
48 exchange,
49 extensions,
49 extensions,
50 filemerge,
50 filemerge,
51 fileset,
51 fileset,
52 formatter,
52 formatter,
53 hg,
53 hg,
54 httppeer,
54 httppeer,
55 localrepo,
55 localrepo,
56 lock as lockmod,
56 lock as lockmod,
57 logcmdutil,
57 logcmdutil,
58 merge as mergemod,
58 merge as mergemod,
59 obsolete,
59 obsolete,
60 obsutil,
60 obsutil,
61 phases,
61 phases,
62 policy,
62 policy,
63 pvec,
63 pvec,
64 pycompat,
64 pycompat,
65 registrar,
65 registrar,
66 repair,
66 repair,
67 revlog,
67 revlog,
68 revset,
68 revset,
69 revsetlang,
69 revsetlang,
70 scmutil,
70 scmutil,
71 setdiscovery,
71 setdiscovery,
72 simplemerge,
72 simplemerge,
73 sshpeer,
73 sshpeer,
74 sslutil,
74 sslutil,
75 streamclone,
75 streamclone,
76 templater,
76 templater,
77 treediscovery,
77 treediscovery,
78 upgrade,
78 upgrade,
79 url as urlmod,
79 url as urlmod,
80 util,
80 util,
81 vfs as vfsmod,
81 vfs as vfsmod,
82 wireprotoframing,
82 wireprotoframing,
83 wireprotoserver,
83 wireprotoserver,
84 wireprotov2peer,
84 wireprotov2peer,
85 )
85 )
86 from .utils import (
86 from .utils import (
87 dateutil,
87 dateutil,
88 procutil,
88 procutil,
89 stringutil,
89 stringutil,
90 )
90 )
91
91
92 release = lockmod.release
92 release = lockmod.release
93
93
94 command = registrar.command()
94 command = registrar.command()
95
95
96 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
96 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
97 def debugancestor(ui, repo, *args):
97 def debugancestor(ui, repo, *args):
98 """find the ancestor revision of two revisions in a given index"""
98 """find the ancestor revision of two revisions in a given index"""
99 if len(args) == 3:
99 if len(args) == 3:
100 index, rev1, rev2 = args
100 index, rev1, rev2 = args
101 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
101 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
102 lookup = r.lookup
102 lookup = r.lookup
103 elif len(args) == 2:
103 elif len(args) == 2:
104 if not repo:
104 if not repo:
105 raise error.Abort(_('there is no Mercurial repository here '
105 raise error.Abort(_('there is no Mercurial repository here '
106 '(.hg not found)'))
106 '(.hg not found)'))
107 rev1, rev2 = args
107 rev1, rev2 = args
108 r = repo.changelog
108 r = repo.changelog
109 lookup = repo.lookup
109 lookup = repo.lookup
110 else:
110 else:
111 raise error.Abort(_('either two or three arguments required'))
111 raise error.Abort(_('either two or three arguments required'))
112 a = r.ancestor(lookup(rev1), lookup(rev2))
112 a = r.ancestor(lookup(rev1), lookup(rev2))
113 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
113 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
114
114
115 @command('debugapplystreamclonebundle', [], 'FILE')
115 @command('debugapplystreamclonebundle', [], 'FILE')
116 def debugapplystreamclonebundle(ui, repo, fname):
116 def debugapplystreamclonebundle(ui, repo, fname):
117 """apply a stream clone bundle file"""
117 """apply a stream clone bundle file"""
118 f = hg.openpath(ui, fname)
118 f = hg.openpath(ui, fname)
119 gen = exchange.readbundle(ui, f, fname)
119 gen = exchange.readbundle(ui, f, fname)
120 gen.apply(repo)
120 gen.apply(repo)
121
121
122 @command('debugbuilddag',
122 @command('debugbuilddag',
123 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
123 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
124 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
124 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
125 ('n', 'new-file', None, _('add new file at each rev'))],
125 ('n', 'new-file', None, _('add new file at each rev'))],
126 _('[OPTION]... [TEXT]'))
126 _('[OPTION]... [TEXT]'))
127 def debugbuilddag(ui, repo, text=None,
127 def debugbuilddag(ui, repo, text=None,
128 mergeable_file=False,
128 mergeable_file=False,
129 overwritten_file=False,
129 overwritten_file=False,
130 new_file=False):
130 new_file=False):
131 """builds a repo with a given DAG from scratch in the current empty repo
131 """builds a repo with a given DAG from scratch in the current empty repo
132
132
133 The description of the DAG is read from stdin if not given on the
133 The description of the DAG is read from stdin if not given on the
134 command line.
134 command line.
135
135
136 Elements:
136 Elements:
137
137
138 - "+n" is a linear run of n nodes based on the current default parent
138 - "+n" is a linear run of n nodes based on the current default parent
139 - "." is a single node based on the current default parent
139 - "." is a single node based on the current default parent
140 - "$" resets the default parent to null (implied at the start);
140 - "$" resets the default parent to null (implied at the start);
141 otherwise the default parent is always the last node created
141 otherwise the default parent is always the last node created
142 - "<p" sets the default parent to the backref p
142 - "<p" sets the default parent to the backref p
143 - "*p" is a fork at parent p, which is a backref
143 - "*p" is a fork at parent p, which is a backref
144 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
144 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
145 - "/p2" is a merge of the preceding node and p2
145 - "/p2" is a merge of the preceding node and p2
146 - ":tag" defines a local tag for the preceding node
146 - ":tag" defines a local tag for the preceding node
147 - "@branch" sets the named branch for subsequent nodes
147 - "@branch" sets the named branch for subsequent nodes
148 - "#...\\n" is a comment up to the end of the line
148 - "#...\\n" is a comment up to the end of the line
149
149
150 Whitespace between the above elements is ignored.
150 Whitespace between the above elements is ignored.
151
151
152 A backref is either
152 A backref is either
153
153
154 - a number n, which references the node curr-n, where curr is the current
154 - a number n, which references the node curr-n, where curr is the current
155 node, or
155 node, or
156 - the name of a local tag you placed earlier using ":tag", or
156 - the name of a local tag you placed earlier using ":tag", or
157 - empty to denote the default parent.
157 - empty to denote the default parent.
158
158
159 All string valued-elements are either strictly alphanumeric, or must
159 All string valued-elements are either strictly alphanumeric, or must
160 be enclosed in double quotes ("..."), with "\\" as escape character.
160 be enclosed in double quotes ("..."), with "\\" as escape character.
161 """
161 """
162
162
163 if text is None:
163 if text is None:
164 ui.status(_("reading DAG from stdin\n"))
164 ui.status(_("reading DAG from stdin\n"))
165 text = ui.fin.read()
165 text = ui.fin.read()
166
166
167 cl = repo.changelog
167 cl = repo.changelog
168 if len(cl) > 0:
168 if len(cl) > 0:
169 raise error.Abort(_('repository is not empty'))
169 raise error.Abort(_('repository is not empty'))
170
170
171 # determine number of revs in DAG
171 # determine number of revs in DAG
172 total = 0
172 total = 0
173 for type, data in dagparser.parsedag(text):
173 for type, data in dagparser.parsedag(text):
174 if type == 'n':
174 if type == 'n':
175 total += 1
175 total += 1
176
176
177 if mergeable_file:
177 if mergeable_file:
178 linesperrev = 2
178 linesperrev = 2
179 # make a file with k lines per rev
179 # make a file with k lines per rev
180 initialmergedlines = ['%d' % i for i in xrange(0, total * linesperrev)]
180 initialmergedlines = ['%d' % i for i in xrange(0, total * linesperrev)]
181 initialmergedlines.append("")
181 initialmergedlines.append("")
182
182
183 tags = []
183 tags = []
184
184
     wlock = lock = tr = None
+    progress = ui.makeprogress(_('building'), unit=_('revisions'),
+                               total=total)
     try:
         wlock = repo.wlock()
         lock = repo.lock()
         tr = repo.transaction("builddag")

         at = -1
         atbranch = 'default'
         nodeids = []
         id = 0
-        ui.progress(_('building'), id, unit=_('revisions'), total=total)
+        progress.update(id)
         for type, data in dagparser.parsedag(text):
197 if type == 'n':
199 if type == 'n':
198 ui.note(('node %s\n' % pycompat.bytestr(data)))
200 ui.note(('node %s\n' % pycompat.bytestr(data)))
199 id, ps = data
201 id, ps = data
200
202
201 files = []
203 files = []
202 filecontent = {}
204 filecontent = {}
203
205
204 p2 = None
206 p2 = None
205 if mergeable_file:
207 if mergeable_file:
206 fn = "mf"
208 fn = "mf"
207 p1 = repo[ps[0]]
209 p1 = repo[ps[0]]
208 if len(ps) > 1:
210 if len(ps) > 1:
209 p2 = repo[ps[1]]
211 p2 = repo[ps[1]]
210 pa = p1.ancestor(p2)
212 pa = p1.ancestor(p2)
211 base, local, other = [x[fn].data() for x in (pa, p1,
213 base, local, other = [x[fn].data() for x in (pa, p1,
212 p2)]
214 p2)]
213 m3 = simplemerge.Merge3Text(base, local, other)
215 m3 = simplemerge.Merge3Text(base, local, other)
214 ml = [l.strip() for l in m3.merge_lines()]
216 ml = [l.strip() for l in m3.merge_lines()]
215 ml.append("")
217 ml.append("")
216 elif at > 0:
218 elif at > 0:
217 ml = p1[fn].data().split("\n")
219 ml = p1[fn].data().split("\n")
218 else:
220 else:
219 ml = initialmergedlines
221 ml = initialmergedlines
220 ml[id * linesperrev] += " r%i" % id
222 ml[id * linesperrev] += " r%i" % id
221 mergedtext = "\n".join(ml)
223 mergedtext = "\n".join(ml)
222 files.append(fn)
224 files.append(fn)
223 filecontent[fn] = mergedtext
225 filecontent[fn] = mergedtext
224
226
225 if overwritten_file:
227 if overwritten_file:
226 fn = "of"
228 fn = "of"
227 files.append(fn)
229 files.append(fn)
228 filecontent[fn] = "r%i\n" % id
230 filecontent[fn] = "r%i\n" % id
229
231
230 if new_file:
232 if new_file:
231 fn = "nf%i" % id
233 fn = "nf%i" % id
232 files.append(fn)
234 files.append(fn)
233 filecontent[fn] = "r%i\n" % id
235 filecontent[fn] = "r%i\n" % id
234 if len(ps) > 1:
236 if len(ps) > 1:
235 if not p2:
237 if not p2:
236 p2 = repo[ps[1]]
238 p2 = repo[ps[1]]
237 for fn in p2:
239 for fn in p2:
238 if fn.startswith("nf"):
240 if fn.startswith("nf"):
239 files.append(fn)
241 files.append(fn)
240 filecontent[fn] = p2[fn].data()
242 filecontent[fn] = p2[fn].data()
241
243
242 def fctxfn(repo, cx, path):
244 def fctxfn(repo, cx, path):
243 if path in filecontent:
245 if path in filecontent:
244 return context.memfilectx(repo, cx, path,
246 return context.memfilectx(repo, cx, path,
245 filecontent[path])
247 filecontent[path])
246 return None
248 return None
247
249
248 if len(ps) == 0 or ps[0] < 0:
250 if len(ps) == 0 or ps[0] < 0:
249 pars = [None, None]
251 pars = [None, None]
250 elif len(ps) == 1:
252 elif len(ps) == 1:
251 pars = [nodeids[ps[0]], None]
253 pars = [nodeids[ps[0]], None]
252 else:
254 else:
253 pars = [nodeids[p] for p in ps]
255 pars = [nodeids[p] for p in ps]
254 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
256 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
255 date=(id, 0),
257 date=(id, 0),
256 user="debugbuilddag",
258 user="debugbuilddag",
257 extra={'branch': atbranch})
259 extra={'branch': atbranch})
258 nodeid = repo.commitctx(cx)
260 nodeid = repo.commitctx(cx)
259 nodeids.append(nodeid)
261 nodeids.append(nodeid)
260 at = id
262 at = id
261 elif type == 'l':
263 elif type == 'l':
262 id, name = data
264 id, name = data
263 ui.note(('tag %s\n' % name))
265 ui.note(('tag %s\n' % name))
264 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
266 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
265 elif type == 'a':
267 elif type == 'a':
266 ui.note(('branch %s\n' % data))
268 ui.note(('branch %s\n' % data))
267 atbranch = data
269 atbranch = data
-            ui.progress(_('building'), id, unit=_('revisions'), total=total)
+            progress.update(id)
         tr.close()

         if tags:
             repo.vfs.write("localtags", "".join(tags))
     finally:
-        ui.progress(_('building'), None)
+        progress.complete()
         release(tr, lock, wlock)
276
278
277 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
279 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
278 indent_string = ' ' * indent
280 indent_string = ' ' * indent
279 if all:
281 if all:
280 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
282 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
281 % indent_string)
283 % indent_string)
282
284
283 def showchunks(named):
285 def showchunks(named):
284 ui.write("\n%s%s\n" % (indent_string, named))
286 ui.write("\n%s%s\n" % (indent_string, named))
285 for deltadata in gen.deltaiter():
287 for deltadata in gen.deltaiter():
286 node, p1, p2, cs, deltabase, delta, flags = deltadata
288 node, p1, p2, cs, deltabase, delta, flags = deltadata
287 ui.write("%s%s %s %s %s %s %d\n" %
289 ui.write("%s%s %s %s %s %s %d\n" %
288 (indent_string, hex(node), hex(p1), hex(p2),
290 (indent_string, hex(node), hex(p1), hex(p2),
289 hex(cs), hex(deltabase), len(delta)))
291 hex(cs), hex(deltabase), len(delta)))
290
292
291 chunkdata = gen.changelogheader()
293 chunkdata = gen.changelogheader()
292 showchunks("changelog")
294 showchunks("changelog")
293 chunkdata = gen.manifestheader()
295 chunkdata = gen.manifestheader()
294 showchunks("manifest")
296 showchunks("manifest")
295 for chunkdata in iter(gen.filelogheader, {}):
297 for chunkdata in iter(gen.filelogheader, {}):
296 fname = chunkdata['filename']
298 fname = chunkdata['filename']
297 showchunks(fname)
299 showchunks(fname)
298 else:
300 else:
299 if isinstance(gen, bundle2.unbundle20):
301 if isinstance(gen, bundle2.unbundle20):
300 raise error.Abort(_('use debugbundle2 for this file'))
302 raise error.Abort(_('use debugbundle2 for this file'))
301 chunkdata = gen.changelogheader()
303 chunkdata = gen.changelogheader()
302 for deltadata in gen.deltaiter():
304 for deltadata in gen.deltaiter():
303 node, p1, p2, cs, deltabase, delta, flags = deltadata
305 node, p1, p2, cs, deltabase, delta, flags = deltadata
304 ui.write("%s%s\n" % (indent_string, hex(node)))
306 ui.write("%s%s\n" % (indent_string, hex(node)))
305
307
306 def _debugobsmarkers(ui, part, indent=0, **opts):
308 def _debugobsmarkers(ui, part, indent=0, **opts):
307 """display version and markers contained in 'data'"""
309 """display version and markers contained in 'data'"""
308 opts = pycompat.byteskwargs(opts)
310 opts = pycompat.byteskwargs(opts)
309 data = part.read()
311 data = part.read()
310 indent_string = ' ' * indent
312 indent_string = ' ' * indent
311 try:
313 try:
312 version, markers = obsolete._readmarkers(data)
314 version, markers = obsolete._readmarkers(data)
313 except error.UnknownVersion as exc:
315 except error.UnknownVersion as exc:
314 msg = "%sunsupported version: %s (%d bytes)\n"
316 msg = "%sunsupported version: %s (%d bytes)\n"
315 msg %= indent_string, exc.version, len(data)
317 msg %= indent_string, exc.version, len(data)
316 ui.write(msg)
318 ui.write(msg)
317 else:
319 else:
318 msg = "%sversion: %d (%d bytes)\n"
320 msg = "%sversion: %d (%d bytes)\n"
319 msg %= indent_string, version, len(data)
321 msg %= indent_string, version, len(data)
320 ui.write(msg)
322 ui.write(msg)
321 fm = ui.formatter('debugobsolete', opts)
323 fm = ui.formatter('debugobsolete', opts)
322 for rawmarker in sorted(markers):
324 for rawmarker in sorted(markers):
323 m = obsutil.marker(None, rawmarker)
325 m = obsutil.marker(None, rawmarker)
324 fm.startitem()
326 fm.startitem()
325 fm.plain(indent_string)
327 fm.plain(indent_string)
326 cmdutil.showmarker(fm, m)
328 cmdutil.showmarker(fm, m)
327 fm.end()
329 fm.end()
328
330
329 def _debugphaseheads(ui, data, indent=0):
331 def _debugphaseheads(ui, data, indent=0):
330 """display version and markers contained in 'data'"""
332 """display version and markers contained in 'data'"""
331 indent_string = ' ' * indent
333 indent_string = ' ' * indent
332 headsbyphase = phases.binarydecode(data)
334 headsbyphase = phases.binarydecode(data)
333 for phase in phases.allphases:
335 for phase in phases.allphases:
334 for head in headsbyphase[phase]:
336 for head in headsbyphase[phase]:
335 ui.write(indent_string)
337 ui.write(indent_string)
336 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
338 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
337
339
338 def _quasirepr(thing):
340 def _quasirepr(thing):
339 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
341 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
340 return '{%s}' % (
342 return '{%s}' % (
341 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
343 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
342 return pycompat.bytestr(repr(thing))
344 return pycompat.bytestr(repr(thing))
343
345
344 def _debugbundle2(ui, gen, all=None, **opts):
346 def _debugbundle2(ui, gen, all=None, **opts):
345 """lists the contents of a bundle2"""
347 """lists the contents of a bundle2"""
346 if not isinstance(gen, bundle2.unbundle20):
348 if not isinstance(gen, bundle2.unbundle20):
347 raise error.Abort(_('not a bundle2 file'))
349 raise error.Abort(_('not a bundle2 file'))
348 ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
350 ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
349 parttypes = opts.get(r'part_type', [])
351 parttypes = opts.get(r'part_type', [])
350 for part in gen.iterparts():
352 for part in gen.iterparts():
351 if parttypes and part.type not in parttypes:
353 if parttypes and part.type not in parttypes:
352 continue
354 continue
353 msg = '%s -- %s (mandatory: %r)\n'
355 msg = '%s -- %s (mandatory: %r)\n'
354 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
356 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
355 if part.type == 'changegroup':
357 if part.type == 'changegroup':
356 version = part.params.get('version', '01')
358 version = part.params.get('version', '01')
357 cg = changegroup.getunbundler(version, part, 'UN')
359 cg = changegroup.getunbundler(version, part, 'UN')
358 if not ui.quiet:
360 if not ui.quiet:
359 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
361 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
360 if part.type == 'obsmarkers':
362 if part.type == 'obsmarkers':
361 if not ui.quiet:
363 if not ui.quiet:
362 _debugobsmarkers(ui, part, indent=4, **opts)
364 _debugobsmarkers(ui, part, indent=4, **opts)
363 if part.type == 'phase-heads':
365 if part.type == 'phase-heads':
364 if not ui.quiet:
366 if not ui.quiet:
365 _debugphaseheads(ui, part, indent=4)
367 _debugphaseheads(ui, part, indent=4)
366
368
367 @command('debugbundle',
369 @command('debugbundle',
368 [('a', 'all', None, _('show all details')),
370 [('a', 'all', None, _('show all details')),
369 ('', 'part-type', [], _('show only the named part type')),
371 ('', 'part-type', [], _('show only the named part type')),
370 ('', 'spec', None, _('print the bundlespec of the bundle'))],
372 ('', 'spec', None, _('print the bundlespec of the bundle'))],
371 _('FILE'),
373 _('FILE'),
372 norepo=True)
374 norepo=True)
373 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
375 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
374 """lists the contents of a bundle"""
376 """lists the contents of a bundle"""
375 with hg.openpath(ui, bundlepath) as f:
377 with hg.openpath(ui, bundlepath) as f:
376 if spec:
378 if spec:
377 spec = exchange.getbundlespec(ui, f)
379 spec = exchange.getbundlespec(ui, f)
378 ui.write('%s\n' % spec)
380 ui.write('%s\n' % spec)
379 return
381 return
380
382
381 gen = exchange.readbundle(ui, f, bundlepath)
383 gen = exchange.readbundle(ui, f, bundlepath)
382 if isinstance(gen, bundle2.unbundle20):
384 if isinstance(gen, bundle2.unbundle20):
383 return _debugbundle2(ui, gen, all=all, **opts)
385 return _debugbundle2(ui, gen, all=all, **opts)
384 _debugchangegroup(ui, gen, all=all, **opts)
386 _debugchangegroup(ui, gen, all=all, **opts)
385
387
386 @command('debugcapabilities',
388 @command('debugcapabilities',
387 [], _('PATH'),
389 [], _('PATH'),
388 norepo=True)
390 norepo=True)
389 def debugcapabilities(ui, path, **opts):
391 def debugcapabilities(ui, path, **opts):
390 """lists the capabilities of a remote peer"""
392 """lists the capabilities of a remote peer"""
391 opts = pycompat.byteskwargs(opts)
393 opts = pycompat.byteskwargs(opts)
392 peer = hg.peer(ui, opts, path)
394 peer = hg.peer(ui, opts, path)
393 caps = peer.capabilities()
395 caps = peer.capabilities()
394 ui.write(('Main capabilities:\n'))
396 ui.write(('Main capabilities:\n'))
395 for c in sorted(caps):
397 for c in sorted(caps):
396 ui.write((' %s\n') % c)
398 ui.write((' %s\n') % c)
397 b2caps = bundle2.bundle2caps(peer)
399 b2caps = bundle2.bundle2caps(peer)
398 if b2caps:
400 if b2caps:
399 ui.write(('Bundle2 capabilities:\n'))
401 ui.write(('Bundle2 capabilities:\n'))
400 for key, values in sorted(b2caps.iteritems()):
402 for key, values in sorted(b2caps.iteritems()):
401 ui.write((' %s\n') % key)
403 ui.write((' %s\n') % key)
402 for v in values:
404 for v in values:
403 ui.write((' %s\n') % v)
405 ui.write((' %s\n') % v)
404
406
405 @command('debugcheckstate', [], '')
407 @command('debugcheckstate', [], '')
406 def debugcheckstate(ui, repo):
408 def debugcheckstate(ui, repo):
407 """validate the correctness of the current dirstate"""
409 """validate the correctness of the current dirstate"""
408 parent1, parent2 = repo.dirstate.parents()
410 parent1, parent2 = repo.dirstate.parents()
409 m1 = repo[parent1].manifest()
411 m1 = repo[parent1].manifest()
410 m2 = repo[parent2].manifest()
412 m2 = repo[parent2].manifest()
411 errors = 0
413 errors = 0
412 for f in repo.dirstate:
414 for f in repo.dirstate:
413 state = repo.dirstate[f]
415 state = repo.dirstate[f]
414 if state in "nr" and f not in m1:
416 if state in "nr" and f not in m1:
415 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
417 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
416 errors += 1
418 errors += 1
417 if state in "a" and f in m1:
419 if state in "a" and f in m1:
418 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
420 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
419 errors += 1
421 errors += 1
420 if state in "m" and f not in m1 and f not in m2:
422 if state in "m" and f not in m1 and f not in m2:
421 ui.warn(_("%s in state %s, but not in either manifest\n") %
423 ui.warn(_("%s in state %s, but not in either manifest\n") %
422 (f, state))
424 (f, state))
423 errors += 1
425 errors += 1
424 for f in m1:
426 for f in m1:
425 state = repo.dirstate[f]
427 state = repo.dirstate[f]
426 if state not in "nrm":
428 if state not in "nrm":
427 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
429 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
428 errors += 1
430 errors += 1
429 if errors:
431 if errors:
430 error = _(".hg/dirstate inconsistent with current parent's manifest")
432 error = _(".hg/dirstate inconsistent with current parent's manifest")
431 raise error.Abort(error)
433 raise error.Abort(error)
432
434
433 @command('debugcolor',
435 @command('debugcolor',
434 [('', 'style', None, _('show all configured styles'))],
436 [('', 'style', None, _('show all configured styles'))],
435 'hg debugcolor')
437 'hg debugcolor')
436 def debugcolor(ui, repo, **opts):
438 def debugcolor(ui, repo, **opts):
437 """show available color, effects or style"""
439 """show available color, effects or style"""
438 ui.write(('color mode: %s\n') % ui._colormode)
440 ui.write(('color mode: %s\n') % ui._colormode)
439 if opts.get(r'style'):
441 if opts.get(r'style'):
440 return _debugdisplaystyle(ui)
442 return _debugdisplaystyle(ui)
441 else:
443 else:
442 return _debugdisplaycolor(ui)
444 return _debugdisplaycolor(ui)
443
445
444 def _debugdisplaycolor(ui):
446 def _debugdisplaycolor(ui):
445 ui = ui.copy()
447 ui = ui.copy()
446 ui._styles.clear()
448 ui._styles.clear()
447 for effect in color._activeeffects(ui).keys():
449 for effect in color._activeeffects(ui).keys():
448 ui._styles[effect] = effect
450 ui._styles[effect] = effect
449 if ui._terminfoparams:
451 if ui._terminfoparams:
450 for k, v in ui.configitems('color'):
452 for k, v in ui.configitems('color'):
451 if k.startswith('color.'):
453 if k.startswith('color.'):
452 ui._styles[k] = k[6:]
454 ui._styles[k] = k[6:]
453 elif k.startswith('terminfo.'):
455 elif k.startswith('terminfo.'):
454 ui._styles[k] = k[9:]
456 ui._styles[k] = k[9:]
455 ui.write(_('available colors:\n'))
457 ui.write(_('available colors:\n'))
456 # sort label with a '_' after the other to group '_background' entry.
458 # sort label with a '_' after the other to group '_background' entry.
457 items = sorted(ui._styles.items(),
459 items = sorted(ui._styles.items(),
458 key=lambda i: ('_' in i[0], i[0], i[1]))
460 key=lambda i: ('_' in i[0], i[0], i[1]))
459 for colorname, label in items:
461 for colorname, label in items:
460 ui.write(('%s\n') % colorname, label=label)
462 ui.write(('%s\n') % colorname, label=label)
461
463
462 def _debugdisplaystyle(ui):
464 def _debugdisplaystyle(ui):
463 ui.write(_('available style:\n'))
465 ui.write(_('available style:\n'))
464 if not ui._styles:
466 if not ui._styles:
465 return
467 return
466 width = max(len(s) for s in ui._styles)
468 width = max(len(s) for s in ui._styles)
467 for label, effects in sorted(ui._styles.items()):
469 for label, effects in sorted(ui._styles.items()):
468 ui.write('%s' % label, label=label)
470 ui.write('%s' % label, label=label)
469 if effects:
471 if effects:
470 # 50
472 # 50
471 ui.write(': ')
473 ui.write(': ')
472 ui.write(' ' * (max(0, width - len(label))))
474 ui.write(' ' * (max(0, width - len(label))))
473 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
475 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
474 ui.write('\n')
476 ui.write('\n')
475
477
476 @command('debugcreatestreamclonebundle', [], 'FILE')
478 @command('debugcreatestreamclonebundle', [], 'FILE')
477 def debugcreatestreamclonebundle(ui, repo, fname):
479 def debugcreatestreamclonebundle(ui, repo, fname):
478 """create a stream clone bundle file
480 """create a stream clone bundle file
479
481
480 Stream bundles are special bundles that are essentially archives of
482 Stream bundles are special bundles that are essentially archives of
481 revlog files. They are commonly used for cloning very quickly.
483 revlog files. They are commonly used for cloning very quickly.
482 """
484 """
483 # TODO we may want to turn this into an abort when this functionality
485 # TODO we may want to turn this into an abort when this functionality
484 # is moved into `hg bundle`.
486 # is moved into `hg bundle`.
485 if phases.hassecret(repo):
487 if phases.hassecret(repo):
486 ui.warn(_('(warning: stream clone bundle will contain secret '
488 ui.warn(_('(warning: stream clone bundle will contain secret '
487 'revisions)\n'))
489 'revisions)\n'))
488
490
489 requirements, gen = streamclone.generatebundlev1(repo)
491 requirements, gen = streamclone.generatebundlev1(repo)
490 changegroup.writechunks(ui, gen, fname)
492 changegroup.writechunks(ui, gen, fname)
491
493
492 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
494 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
493
495
494 @command('debugdag',
496 @command('debugdag',
495 [('t', 'tags', None, _('use tags as labels')),
497 [('t', 'tags', None, _('use tags as labels')),
496 ('b', 'branches', None, _('annotate with branch names')),
498 ('b', 'branches', None, _('annotate with branch names')),
497 ('', 'dots', None, _('use dots for runs')),
499 ('', 'dots', None, _('use dots for runs')),
498 ('s', 'spaces', None, _('separate elements by spaces'))],
500 ('s', 'spaces', None, _('separate elements by spaces'))],
499 _('[OPTION]... [FILE [REV]...]'),
501 _('[OPTION]... [FILE [REV]...]'),
500 optionalrepo=True)
502 optionalrepo=True)
501 def debugdag(ui, repo, file_=None, *revs, **opts):
503 def debugdag(ui, repo, file_=None, *revs, **opts):
502 """format the changelog or an index DAG as a concise textual description
504 """format the changelog or an index DAG as a concise textual description
503
505
504 If you pass a revlog index, the revlog's DAG is emitted. If you list
506 If you pass a revlog index, the revlog's DAG is emitted. If you list
505 revision numbers, they get labeled in the output as rN.
507 revision numbers, they get labeled in the output as rN.
506
508
507 Otherwise, the changelog DAG of the current repo is emitted.
509 Otherwise, the changelog DAG of the current repo is emitted.
508 """
510 """
509 spaces = opts.get(r'spaces')
511 spaces = opts.get(r'spaces')
510 dots = opts.get(r'dots')
512 dots = opts.get(r'dots')
511 if file_:
513 if file_:
512 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
514 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
513 file_)
515 file_)
514 revs = set((int(r) for r in revs))
516 revs = set((int(r) for r in revs))
515 def events():
517 def events():
516 for r in rlog:
518 for r in rlog:
517 yield 'n', (r, list(p for p in rlog.parentrevs(r)
519 yield 'n', (r, list(p for p in rlog.parentrevs(r)
518 if p != -1))
520 if p != -1))
519 if r in revs:
521 if r in revs:
520 yield 'l', (r, "r%i" % r)
522 yield 'l', (r, "r%i" % r)
521 elif repo:
523 elif repo:
522 cl = repo.changelog
524 cl = repo.changelog
523 tags = opts.get(r'tags')
525 tags = opts.get(r'tags')
524 branches = opts.get(r'branches')
526 branches = opts.get(r'branches')
525 if tags:
527 if tags:
526 labels = {}
528 labels = {}
527 for l, n in repo.tags().items():
529 for l, n in repo.tags().items():
528 labels.setdefault(cl.rev(n), []).append(l)
530 labels.setdefault(cl.rev(n), []).append(l)
529 def events():
531 def events():
530 b = "default"
532 b = "default"
531 for r in cl:
533 for r in cl:
532 if branches:
534 if branches:
533 newb = cl.read(cl.node(r))[5]['branch']
535 newb = cl.read(cl.node(r))[5]['branch']
534 if newb != b:
536 if newb != b:
535 yield 'a', newb
537 yield 'a', newb
536 b = newb
538 b = newb
537 yield 'n', (r, list(p for p in cl.parentrevs(r)
539 yield 'n', (r, list(p for p in cl.parentrevs(r)
538 if p != -1))
540 if p != -1))
539 if tags:
541 if tags:
540 ls = labels.get(r)
542 ls = labels.get(r)
541 if ls:
543 if ls:
542 for l in ls:
544 for l in ls:
543 yield 'l', (r, l)
545 yield 'l', (r, l)
544 else:
546 else:
545 raise error.Abort(_('need repo for changelog dag'))
547 raise error.Abort(_('need repo for changelog dag'))
546
548
547 for line in dagparser.dagtextlines(events(),
549 for line in dagparser.dagtextlines(events(),
548 addspaces=spaces,
550 addspaces=spaces,
549 wraplabels=True,
551 wraplabels=True,
550 wrapannotations=True,
552 wrapannotations=True,
551 wrapnonlinear=dots,
553 wrapnonlinear=dots,
552 usedots=dots,
554 usedots=dots,
553 maxlinewidth=70):
555 maxlinewidth=70):
554 ui.write(line)
556 ui.write(line)
555 ui.write("\n")
557 ui.write("\n")
556
558
557 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
559 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
558 def debugdata(ui, repo, file_, rev=None, **opts):
560 def debugdata(ui, repo, file_, rev=None, **opts):
559 """dump the contents of a data file revision"""
561 """dump the contents of a data file revision"""
560 opts = pycompat.byteskwargs(opts)
562 opts = pycompat.byteskwargs(opts)
561 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
563 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
562 if rev is not None:
564 if rev is not None:
563 raise error.CommandError('debugdata', _('invalid arguments'))
565 raise error.CommandError('debugdata', _('invalid arguments'))
564 file_, rev = None, file_
566 file_, rev = None, file_
565 elif rev is None:
567 elif rev is None:
566 raise error.CommandError('debugdata', _('invalid arguments'))
568 raise error.CommandError('debugdata', _('invalid arguments'))
567 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
569 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
568 try:
570 try:
569 ui.write(r.revision(r.lookup(rev), raw=True))
571 ui.write(r.revision(r.lookup(rev), raw=True))
570 except KeyError:
572 except KeyError:
571 raise error.Abort(_('invalid revision identifier %s') % rev)
573 raise error.Abort(_('invalid revision identifier %s') % rev)
572
574
573 @command('debugdate',
575 @command('debugdate',
574 [('e', 'extended', None, _('try extended date formats'))],
576 [('e', 'extended', None, _('try extended date formats'))],
575 _('[-e] DATE [RANGE]'),
577 _('[-e] DATE [RANGE]'),
576 norepo=True, optionalrepo=True)
578 norepo=True, optionalrepo=True)
577 def debugdate(ui, date, range=None, **opts):
579 def debugdate(ui, date, range=None, **opts):
578 """parse and display a date"""
580 """parse and display a date"""
579 if opts[r"extended"]:
581 if opts[r"extended"]:
580 d = dateutil.parsedate(date, util.extendeddateformats)
582 d = dateutil.parsedate(date, util.extendeddateformats)
581 else:
583 else:
582 d = dateutil.parsedate(date)
584 d = dateutil.parsedate(date)
583 ui.write(("internal: %d %d\n") % d)
585 ui.write(("internal: %d %d\n") % d)
584 ui.write(("standard: %s\n") % dateutil.datestr(d))
586 ui.write(("standard: %s\n") % dateutil.datestr(d))
585 if range:
587 if range:
586 m = dateutil.matchdate(range)
588 m = dateutil.matchdate(range)
587 ui.write(("match: %s\n") % m(d[0]))
589 ui.write(("match: %s\n") % m(d[0]))
588
590
589 @command('debugdeltachain',
591 @command('debugdeltachain',
590 cmdutil.debugrevlogopts + cmdutil.formatteropts,
592 cmdutil.debugrevlogopts + cmdutil.formatteropts,
591 _('-c|-m|FILE'),
593 _('-c|-m|FILE'),
592 optionalrepo=True)
594 optionalrepo=True)
593 def debugdeltachain(ui, repo, file_=None, **opts):
595 def debugdeltachain(ui, repo, file_=None, **opts):
594 """dump information about delta chains in a revlog
596 """dump information about delta chains in a revlog
595
597
596 Output can be templatized. Available template keywords are:
598 Output can be templatized. Available template keywords are:
597
599
598 :``rev``: revision number
600 :``rev``: revision number
599 :``chainid``: delta chain identifier (numbered by unique base)
601 :``chainid``: delta chain identifier (numbered by unique base)
600 :``chainlen``: delta chain length to this revision
602 :``chainlen``: delta chain length to this revision
601 :``prevrev``: previous revision in delta chain
603 :``prevrev``: previous revision in delta chain
602 :``deltatype``: role of delta / how it was computed
604 :``deltatype``: role of delta / how it was computed
603 :``compsize``: compressed size of revision
605 :``compsize``: compressed size of revision
604 :``uncompsize``: uncompressed size of revision
606 :``uncompsize``: uncompressed size of revision
605 :``chainsize``: total size of compressed revisions in chain
607 :``chainsize``: total size of compressed revisions in chain
606 :``chainratio``: total chain size divided by uncompressed revision size
608 :``chainratio``: total chain size divided by uncompressed revision size
607 (new delta chains typically start at ratio 2.00)
609 (new delta chains typically start at ratio 2.00)
608 :``lindist``: linear distance from base revision in delta chain to end
610 :``lindist``: linear distance from base revision in delta chain to end
609 of this revision
611 of this revision
610 :``extradist``: total size of revisions not part of this delta chain from
612 :``extradist``: total size of revisions not part of this delta chain from
611 base of delta chain to end of this revision; a measurement
613 base of delta chain to end of this revision; a measurement
612 of how much extra data we need to read/seek across to read
614 of how much extra data we need to read/seek across to read
613 the delta chain for this revision
615 the delta chain for this revision
614 :``extraratio``: extradist divided by chainsize; another representation of
616 :``extraratio``: extradist divided by chainsize; another representation of
615 how much unrelated data is needed to load this delta chain
617 how much unrelated data is needed to load this delta chain
616
618
617 If the repository is configured to use the sparse read, additional keywords
619 If the repository is configured to use the sparse read, additional keywords
618 are available:
620 are available:
619
621
620 :``readsize``: total size of data read from the disk for a revision
622 :``readsize``: total size of data read from the disk for a revision
621 (sum of the sizes of all the blocks)
623 (sum of the sizes of all the blocks)
622 :``largestblock``: size of the largest block of data read from the disk
624 :``largestblock``: size of the largest block of data read from the disk
623 :``readdensity``: density of useful bytes in the data read from the disk
625 :``readdensity``: density of useful bytes in the data read from the disk
624 :``srchunks``: in how many data hunks the whole revision would be read
626 :``srchunks``: in how many data hunks the whole revision would be read
625
627
626 The sparse read can be enabled with experimental.sparse-read = True
628 The sparse read can be enabled with experimental.sparse-read = True
627 """
629 """
628 opts = pycompat.byteskwargs(opts)
630 opts = pycompat.byteskwargs(opts)
629 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
631 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
630 index = r.index
632 index = r.index
631 start = r.start
633 start = r.start
632 length = r.length
634 length = r.length
633 generaldelta = r.version & revlog.FLAG_GENERALDELTA
635 generaldelta = r.version & revlog.FLAG_GENERALDELTA
634 withsparseread = getattr(r, '_withsparseread', False)
636 withsparseread = getattr(r, '_withsparseread', False)
635
637
636 def revinfo(rev):
638 def revinfo(rev):
637 e = index[rev]
639 e = index[rev]
638 compsize = e[1]
640 compsize = e[1]
639 uncompsize = e[2]
641 uncompsize = e[2]
640 chainsize = 0
642 chainsize = 0
641
643
642 if generaldelta:
644 if generaldelta:
643 if e[3] == e[5]:
645 if e[3] == e[5]:
644 deltatype = 'p1'
646 deltatype = 'p1'
645 elif e[3] == e[6]:
647 elif e[3] == e[6]:
646 deltatype = 'p2'
648 deltatype = 'p2'
647 elif e[3] == rev - 1:
649 elif e[3] == rev - 1:
648 deltatype = 'prev'
650 deltatype = 'prev'
649 elif e[3] == rev:
651 elif e[3] == rev:
650 deltatype = 'base'
652 deltatype = 'base'
651 else:
653 else:
652 deltatype = 'other'
654 deltatype = 'other'
653 else:
655 else:
654 if e[3] == rev:
656 if e[3] == rev:
655 deltatype = 'base'
657 deltatype = 'base'
656 else:
658 else:
657 deltatype = 'prev'
659 deltatype = 'prev'
658
660
659 chain = r._deltachain(rev)[0]
661 chain = r._deltachain(rev)[0]
660 for iterrev in chain:
662 for iterrev in chain:
661 e = index[iterrev]
663 e = index[iterrev]
662 chainsize += e[1]
664 chainsize += e[1]
663
665
664 return compsize, uncompsize, deltatype, chain, chainsize
666 return compsize, uncompsize, deltatype, chain, chainsize
665
667
666 fm = ui.formatter('debugdeltachain', opts)
668 fm = ui.formatter('debugdeltachain', opts)
667
669
668 fm.plain(' rev chain# chainlen prev delta '
670 fm.plain(' rev chain# chainlen prev delta '
669 'size rawsize chainsize ratio lindist extradist '
671 'size rawsize chainsize ratio lindist extradist '
670 'extraratio')
672 'extraratio')
671 if withsparseread:
673 if withsparseread:
672 fm.plain(' readsize largestblk rddensity srchunks')
674 fm.plain(' readsize largestblk rddensity srchunks')
673 fm.plain('\n')
675 fm.plain('\n')
674
676
675 chainbases = {}
677 chainbases = {}
676 for rev in r:
678 for rev in r:
677 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
679 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
678 chainbase = chain[0]
680 chainbase = chain[0]
679 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
681 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
680 basestart = start(chainbase)
682 basestart = start(chainbase)
681 revstart = start(rev)
683 revstart = start(rev)
682 lineardist = revstart + comp - basestart
684 lineardist = revstart + comp - basestart
683 extradist = lineardist - chainsize
685 extradist = lineardist - chainsize
684 try:
686 try:
685 prevrev = chain[-2]
687 prevrev = chain[-2]
686 except IndexError:
688 except IndexError:
687 prevrev = -1
689 prevrev = -1
688
690
689 chainratio = float(chainsize) / float(uncomp)
691 chainratio = float(chainsize) / float(uncomp)
690 extraratio = float(extradist) / float(chainsize)
692 extraratio = float(extradist) / float(chainsize)
691
693
692 fm.startitem()
694 fm.startitem()
693 fm.write('rev chainid chainlen prevrev deltatype compsize '
695 fm.write('rev chainid chainlen prevrev deltatype compsize '
694 'uncompsize chainsize chainratio lindist extradist '
696 'uncompsize chainsize chainratio lindist extradist '
695 'extraratio',
697 'extraratio',
696 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
698 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
697 rev, chainid, len(chain), prevrev, deltatype, comp,
699 rev, chainid, len(chain), prevrev, deltatype, comp,
698 uncomp, chainsize, chainratio, lineardist, extradist,
700 uncomp, chainsize, chainratio, lineardist, extradist,
699 extraratio,
701 extraratio,
700 rev=rev, chainid=chainid, chainlen=len(chain),
702 rev=rev, chainid=chainid, chainlen=len(chain),
701 prevrev=prevrev, deltatype=deltatype, compsize=comp,
703 prevrev=prevrev, deltatype=deltatype, compsize=comp,
702 uncompsize=uncomp, chainsize=chainsize,
704 uncompsize=uncomp, chainsize=chainsize,
703 chainratio=chainratio, lindist=lineardist,
705 chainratio=chainratio, lindist=lineardist,
704 extradist=extradist, extraratio=extraratio)
706 extradist=extradist, extraratio=extraratio)
705 if withsparseread:
707 if withsparseread:
706 readsize = 0
708 readsize = 0
707 largestblock = 0
709 largestblock = 0
708 srchunks = 0
710 srchunks = 0
709
711
710 for revschunk in revlog._slicechunk(r, chain):
712 for revschunk in revlog._slicechunk(r, chain):
711 srchunks += 1
713 srchunks += 1
712 blkend = start(revschunk[-1]) + length(revschunk[-1])
714 blkend = start(revschunk[-1]) + length(revschunk[-1])
713 blksize = blkend - start(revschunk[0])
715 blksize = blkend - start(revschunk[0])
714
716
715 readsize += blksize
717 readsize += blksize
716 if largestblock < blksize:
718 if largestblock < blksize:
717 largestblock = blksize
719 largestblock = blksize
718
720
719 readdensity = float(chainsize) / float(readsize)
721 readdensity = float(chainsize) / float(readsize)
720
722
721 fm.write('readsize largestblock readdensity srchunks',
723 fm.write('readsize largestblock readdensity srchunks',
722 ' %10d %10d %9.5f %8d',
724 ' %10d %10d %9.5f %8d',
723 readsize, largestblock, readdensity, srchunks,
725 readsize, largestblock, readdensity, srchunks,
724 readsize=readsize, largestblock=largestblock,
726 readsize=readsize, largestblock=largestblock,
725 readdensity=readdensity, srchunks=srchunks)
727 readdensity=readdensity, srchunks=srchunks)
726
728
727 fm.plain('\n')
729 fm.plain('\n')
728
730
729 fm.end()
731 fm.end()
730
732
731 @command('debugdirstate|debugstate',
733 @command('debugdirstate|debugstate',
732 [('', 'nodates', None, _('do not display the saved mtime')),
734 [('', 'nodates', None, _('do not display the saved mtime')),
733 ('', 'datesort', None, _('sort by saved mtime'))],
735 ('', 'datesort', None, _('sort by saved mtime'))],
734 _('[OPTION]...'))
736 _('[OPTION]...'))
735 def debugstate(ui, repo, **opts):
737 def debugstate(ui, repo, **opts):
736 """show the contents of the current dirstate"""
738 """show the contents of the current dirstate"""
737
739
738 nodates = opts.get(r'nodates')
740 nodates = opts.get(r'nodates')
739 datesort = opts.get(r'datesort')
741 datesort = opts.get(r'datesort')
740
742
741 timestr = ""
743 timestr = ""
742 if datesort:
744 if datesort:
743 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
745 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
744 else:
746 else:
745 keyfunc = None # sort by filename
747 keyfunc = None # sort by filename
746 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
748 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
747 if ent[3] == -1:
749 if ent[3] == -1:
748 timestr = 'unset '
750 timestr = 'unset '
749 elif nodates:
751 elif nodates:
750 timestr = 'set '
752 timestr = 'set '
751 else:
753 else:
752 timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
754 timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
753 time.localtime(ent[3]))
755 time.localtime(ent[3]))
754 timestr = encoding.strtolocal(timestr)
756 timestr = encoding.strtolocal(timestr)
755 if ent[1] & 0o20000:
757 if ent[1] & 0o20000:
756 mode = 'lnk'
758 mode = 'lnk'
757 else:
759 else:
758 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
760 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
759 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
761 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
760 for f in repo.dirstate.copies():
762 for f in repo.dirstate.copies():
761 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
763 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
762
764
763 @command('debugdiscovery',
765 @command('debugdiscovery',
764 [('', 'old', None, _('use old-style discovery')),
766 [('', 'old', None, _('use old-style discovery')),
765 ('', 'nonheads', None,
767 ('', 'nonheads', None,
766 _('use old-style discovery with non-heads included')),
768 _('use old-style discovery with non-heads included')),
767 ('', 'rev', [], 'restrict discovery to this set of revs'),
769 ('', 'rev', [], 'restrict discovery to this set of revs'),
768 ] + cmdutil.remoteopts,
770 ] + cmdutil.remoteopts,
769 _('[--rev REV] [OTHER]'))
771 _('[--rev REV] [OTHER]'))
770 def debugdiscovery(ui, repo, remoteurl="default", **opts):
772 def debugdiscovery(ui, repo, remoteurl="default", **opts):
771 """runs the changeset discovery protocol in isolation"""
773 """runs the changeset discovery protocol in isolation"""
772 opts = pycompat.byteskwargs(opts)
774 opts = pycompat.byteskwargs(opts)
773 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
775 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
774 remote = hg.peer(repo, opts, remoteurl)
776 remote = hg.peer(repo, opts, remoteurl)
775 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
777 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
776
778
777 # make sure tests are repeatable
779 # make sure tests are repeatable
778 random.seed(12323)
780 random.seed(12323)
779
781
780 def doit(pushedrevs, remoteheads, remote=remote):
782 def doit(pushedrevs, remoteheads, remote=remote):
781 if opts.get('old'):
783 if opts.get('old'):
782 if not util.safehasattr(remote, 'branches'):
784 if not util.safehasattr(remote, 'branches'):
783 # enable in-client legacy support
785 # enable in-client legacy support
784 remote = localrepo.locallegacypeer(remote.local())
786 remote = localrepo.locallegacypeer(remote.local())
785 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
787 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
786 force=True)
788 force=True)
787 common = set(common)
789 common = set(common)
788 if not opts.get('nonheads'):
790 if not opts.get('nonheads'):
789 ui.write(("unpruned common: %s\n") %
791 ui.write(("unpruned common: %s\n") %
790 " ".join(sorted(short(n) for n in common)))
792 " ".join(sorted(short(n) for n in common)))
791 dag = dagutil.revlogdag(repo.changelog)
793 dag = dagutil.revlogdag(repo.changelog)
792 all = dag.ancestorset(dag.internalizeall(common))
794 all = dag.ancestorset(dag.internalizeall(common))
793 common = dag.externalizeall(dag.headsetofconnecteds(all))
795 common = dag.externalizeall(dag.headsetofconnecteds(all))
794 else:
796 else:
795 nodes = None
797 nodes = None
796 if pushedrevs:
798 if pushedrevs:
797 revs = scmutil.revrange(repo, pushedrevs)
799 revs = scmutil.revrange(repo, pushedrevs)
798 nodes = [repo[r].node() for r in revs]
800 nodes = [repo[r].node() for r in revs]
799 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
801 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
800 ancestorsof=nodes)
802 ancestorsof=nodes)
801 common = set(common)
803 common = set(common)
802 rheads = set(hds)
804 rheads = set(hds)
803 lheads = set(repo.heads())
805 lheads = set(repo.heads())
804 ui.write(("common heads: %s\n") %
806 ui.write(("common heads: %s\n") %
805 " ".join(sorted(short(n) for n in common)))
807 " ".join(sorted(short(n) for n in common)))
806 if lheads <= common:
808 if lheads <= common:
807 ui.write(("local is subset\n"))
809 ui.write(("local is subset\n"))
808 elif rheads <= common:
810 elif rheads <= common:
809 ui.write(("remote is subset\n"))
811 ui.write(("remote is subset\n"))
810
812
811 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
813 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
812 localrevs = opts['rev']
814 localrevs = opts['rev']
813 doit(localrevs, remoterevs)
815 doit(localrevs, remoterevs)
814
816
815 _chunksize = 4 << 10
817 _chunksize = 4 << 10
816
818
817 @command('debugdownload',
819 @command('debugdownload',
818 [
820 [
819 ('o', 'output', '', _('path')),
821 ('o', 'output', '', _('path')),
820 ],
822 ],
821 optionalrepo=True)
823 optionalrepo=True)
822 def debugdownload(ui, repo, url, output=None, **opts):
824 def debugdownload(ui, repo, url, output=None, **opts):
823 """download a resource using Mercurial logic and config
825 """download a resource using Mercurial logic and config
824 """
826 """
825 fh = urlmod.open(ui, url, output)
827 fh = urlmod.open(ui, url, output)
826
828
827 dest = ui
829 dest = ui
828 if output:
830 if output:
829 dest = open(output, "wb", _chunksize)
831 dest = open(output, "wb", _chunksize)
830 try:
832 try:
831 data = fh.read(_chunksize)
833 data = fh.read(_chunksize)
832 while data:
834 while data:
833 dest.write(data)
835 dest.write(data)
834 data = fh.read(_chunksize)
836 data = fh.read(_chunksize)
835 finally:
837 finally:
836 if output:
838 if output:
837 dest.close()
839 dest.close()
838
840
839 @command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
841 @command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
840 def debugextensions(ui, repo, **opts):
842 def debugextensions(ui, repo, **opts):
841 '''show information about active extensions'''
843 '''show information about active extensions'''
842 opts = pycompat.byteskwargs(opts)
844 opts = pycompat.byteskwargs(opts)
843 exts = extensions.extensions(ui)
845 exts = extensions.extensions(ui)
844 hgver = util.version()
846 hgver = util.version()
845 fm = ui.formatter('debugextensions', opts)
847 fm = ui.formatter('debugextensions', opts)
846 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
848 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
847 isinternal = extensions.ismoduleinternal(extmod)
849 isinternal = extensions.ismoduleinternal(extmod)
848 extsource = pycompat.fsencode(extmod.__file__)
850 extsource = pycompat.fsencode(extmod.__file__)
849 if isinternal:
851 if isinternal:
850 exttestedwith = [] # never expose magic string to users
852 exttestedwith = [] # never expose magic string to users
851 else:
853 else:
852 exttestedwith = getattr(extmod, 'testedwith', '').split()
854 exttestedwith = getattr(extmod, 'testedwith', '').split()
853 extbuglink = getattr(extmod, 'buglink', None)
855 extbuglink = getattr(extmod, 'buglink', None)
854
856
855 fm.startitem()
857 fm.startitem()
856
858
857 if ui.quiet or ui.verbose:
859 if ui.quiet or ui.verbose:
858 fm.write('name', '%s\n', extname)
860 fm.write('name', '%s\n', extname)
859 else:
861 else:
860 fm.write('name', '%s', extname)
862 fm.write('name', '%s', extname)
861 if isinternal or hgver in exttestedwith:
863 if isinternal or hgver in exttestedwith:
862 fm.plain('\n')
864 fm.plain('\n')
863 elif not exttestedwith:
865 elif not exttestedwith:
864 fm.plain(_(' (untested!)\n'))
866 fm.plain(_(' (untested!)\n'))
865 else:
867 else:
866 lasttestedversion = exttestedwith[-1]
868 lasttestedversion = exttestedwith[-1]
867 fm.plain(' (%s!)\n' % lasttestedversion)
869 fm.plain(' (%s!)\n' % lasttestedversion)
868
870
869 fm.condwrite(ui.verbose and extsource, 'source',
871 fm.condwrite(ui.verbose and extsource, 'source',
870 _(' location: %s\n'), extsource or "")
872 _(' location: %s\n'), extsource or "")
871
873
872 if ui.verbose:
874 if ui.verbose:
873 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
875 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
874 fm.data(bundled=isinternal)
876 fm.data(bundled=isinternal)
875
877
876 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
878 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
877 _(' tested with: %s\n'),
879 _(' tested with: %s\n'),
878 fm.formatlist(exttestedwith, name='ver'))
880 fm.formatlist(exttestedwith, name='ver'))
879
881
880 fm.condwrite(ui.verbose and extbuglink, 'buglink',
882 fm.condwrite(ui.verbose and extbuglink, 'buglink',
881 _(' bug reporting: %s\n'), extbuglink or "")
883 _(' bug reporting: %s\n'), extbuglink or "")
882
884
883 fm.end()
885 fm.end()
884
886
885 @command('debugfileset',
887 @command('debugfileset',
886 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
888 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
887 _('[-r REV] FILESPEC'))
889 _('[-r REV] FILESPEC'))
888 def debugfileset(ui, repo, expr, **opts):
890 def debugfileset(ui, repo, expr, **opts):
889 '''parse and apply a fileset specification'''
891 '''parse and apply a fileset specification'''
890 ctx = scmutil.revsingle(repo, opts.get(r'rev'), None)
892 ctx = scmutil.revsingle(repo, opts.get(r'rev'), None)
891 if ui.verbose:
893 if ui.verbose:
892 tree = fileset.parse(expr)
894 tree = fileset.parse(expr)
893 ui.note(fileset.prettyformat(tree), "\n")
895 ui.note(fileset.prettyformat(tree), "\n")
894
896
895 for f in ctx.getfileset(expr):
897 for f in ctx.getfileset(expr):
896 ui.write("%s\n" % f)
898 ui.write("%s\n" % f)
897
899
@command('debugformat',
    [] + cmdutil.formatteropts,
    _(''))
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len('format-variant'), maxvariantlength)

    def makeformatname(name):
        return '%s:' + (' ' * (maxvariantlength - len(name)))

    fm = ui.formatter('debugformat', opts)
    if fm.isplain():
        def formatvalue(value):
            if util.safehasattr(value, 'startswith'):
                return value
            if value:
                return 'yes'
            else:
                return 'no'
    else:
        formatvalue = pycompat.identity

    fm.plain('format-variant')
    fm.plain(' ' * (maxvariantlength - len('format-variant')))
    fm.plain(' repo')
    if ui.verbose:
        fm.plain(' config default')
    fm.plain('\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        if repovalue != configvalue:
            namelabel = 'formatvariant.name.mismatchconfig'
            repolabel = 'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = 'formatvariant.name.mismatchdefault'
            repolabel = 'formatvariant.repo.mismatchdefault'
        else:
            namelabel = 'formatvariant.name.uptodate'
            repolabel = 'formatvariant.repo.uptodate'

        fm.write('name', makeformatname(fv.name), fv.name,
                 label=namelabel)
        fm.write('repo', ' %3s', formatvalue(repovalue),
                 label=repolabel)
        if fv.default != configvalue:
            configlabel = 'formatvariant.config.special'
        else:
            configlabel = 'formatvariant.config.default'
        fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
                     label=configlabel)
        fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
                     label='formatvariant.default')
        fm.plain('\n')
    fm.end()

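# Example (illustrative, not in the upstream file): without options,
# debugformat prints one row per format variant with the repository's current
# value; --verbose adds the config and Mercurial-default columns, e.g.
#   $ hg debugformat --verbose
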
@command('debugfsinfo', [], _('[PATH]'), norepo=True)
def debugfsinfo(ui, path="."):
    """show information detected about current filesystem"""
    ui.write(('path: %s\n') % path)
    ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
    ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
    ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
    ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
    ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
    casesensitive = '(unknown)'
    try:
        with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
            casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
    except OSError:
        pass
    ui.write(('case-sensitive: %s\n') % casesensitive)

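# Example (illustrative, not in the upstream file): probe the filesystem
# backing a path for exec/symlink/hardlink/case-sensitivity support, e.g.
#   $ hg debugfsinfo .
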
@command('debuggetbundle',
    [('H', 'head', [], _('id of head node'), _('ID')),
     ('C', 'common', [], _('id of common node'), _('ID')),
     ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
    _('REPO FILE [-H|-C ID]...'),
    norepo=True)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('getbundle'):
        raise error.Abort("getbundle() not supported by target repository")
    args = {}
    if common:
        args[r'common'] = [bin(s) for s in common]
    if head:
        args[r'heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args[r'bundlecaps'] = None
    bundle = repo.getbundle('debug', **args)

    bundletype = opts.get('type', 'bzip2').lower()
    btypes = {'none': 'HG10UN',
              'bzip2': 'HG10BZ',
              'gzip': 'HG10GZ',
              'bundle2': 'HG20'}
    bundletype = btypes.get(bundletype)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_('unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)

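# Example (illustrative, not in the upstream file; the node id is a
# placeholder): fetch everything reachable from a head into a local file,
# picking the compression with -t (none, bzip2, gzip or bundle2), e.g.
#   $ hg debuggetbundle http://example.com/repo bundle.hg -H <40-hex-node> -t gzip
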
@command('debugignore', [], '[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write("%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != '.':
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    for p in util.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_("%s is ignored\n") % m.uipath(f))
                else:
                    ui.write(_("%s is ignored because of "
                               "containing folder %s\n")
                             % (m.uipath(f), ignored))
                ignorefile, lineno, line = ignoredata
                ui.write(_("(ignore rule in %s, line %d: '%s')\n")
                         % (ignorefile, lineno, line))
            else:
                ui.write(_("%s is not ignored\n") % m.uipath(f))

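# Example (illustrative, not in the upstream file): with no argument the
# combined ignore matcher is dumped; with file names the matching rule and
# its source line are reported, e.g.
#   $ hg debugignore build/output.o
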
@command('debugindex', cmdutil.debugrevlogopts +
    [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
    _('[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True)
def debugindex(ui, repo, file_=None, **opts):
    """dump the contents of an index file"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise error.Abort(_("unknown format %d") % format)

    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    if format == 0:
        if ui.verbose:
            ui.write((" rev offset length linkrev"
                      " %s %s p2\n") % ("nodeid".ljust(idlen),
                                        "p1".ljust(idlen)))
        else:
            ui.write((" rev linkrev %s %s p2\n") % (
                "nodeid".ljust(idlen), "p1".ljust(idlen)))
    elif format == 1:
        if ui.verbose:
            ui.write((" rev flag offset length size link p1"
                      " p2 %s\n") % "nodeid".rjust(idlen))
        else:
            ui.write((" rev flag size link p1 p2 %s\n") %
                     "nodeid".rjust(idlen))

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                pp = [nullid, nullid]
            if ui.verbose:
                ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
                    i, r.start(i), r.length(i), r.linkrev(i),
                    shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
            else:
                ui.write("% 6d % 7d %s %s %s\n" % (
                    i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
                    shortfn(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                    r.linkrev(i), pr[0], pr[1], shortfn(node)))
            else:
                ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
                    shortfn(node)))

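# Example (illustrative, not in the upstream file): dump the changelog or
# manifest index, or the revlog of a tracked file, optionally in format 1:
#   $ hg debugindex -c
#   $ hg debugindex -f 1 some/tracked/file
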
@command('debugindexdot', cmdutil.debugrevlogopts,
    _('-c|-m|FILE'), optionalrepo=True)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
    ui.write(("digraph G {\n"))
    for i in r:
        node = r.node(i)
        pp = r.parents(node)
        ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
        if pp[1] != nullid:
            ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
    ui.write("}\n")

@command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
def debuginstall(ui, **opts):
    '''test Mercurial installation

    Returns 0 on success.
    '''
    opts = pycompat.byteskwargs(opts)

    def writetemp(contents):
        (fd, name) = pycompat.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, r"wb")
        f.write(contents)
        f.close()
        return name

    problems = 0

    fm = ui.formatter('debuginstall', opts)
    fm.startitem()

    # encoding
    fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(err, 'encodingerror', _(" %s\n"
                 " (check that your locale is properly set)\n"), err)

    # Python
    fm.write('pythonexe', _("checking Python executable (%s)\n"),
             pycompat.sysexecutable)
    fm.write('pythonver', _("checking Python version (%s)\n"),
             ("%d.%d.%d" % sys.version_info[:3]))
    fm.write('pythonlib', _("checking Python lib (%s)...\n"),
             os.path.dirname(pycompat.fsencode(os.__file__)))

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add('sni')

    fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
             fm.formatlist(sorted(security), name='protocol',
                           fmt='%s', sep=','))

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if 'tls1.2' not in security:
        fm.plain(_(' TLS 1.2 not supported by Python install; '
                   'network connections lack modern security\n'))
    if 'sni' not in security:
        fm.plain(_(' SNI not supported by Python install; may have '
                   'connectivity issues with some servers\n'))

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write('hgver', _("checking Mercurial version (%s)\n"),
             hgver.split('+')[0])
    fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
             '+'.join(hgver.split('+')[1:]))

    # compiled modules
    fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
             policy.policy)
    fm.write('hgmodules', _("checking installed modules (%s)...\n"),
             os.path.dirname(pycompat.fsencode(__file__)))

    if policy.policy in ('c', 'allow'):
        err = None
        try:
            from .cext import (
                base85,
                bdiff,
                mpatch,
                osutil,
            )
            dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, 'extensionserror', " %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write('compengines', _('checking registered compression engines (%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines),
                           name='compengine', fmt='%s', sep=', '))
    fm.write('compenginesavail', _('checking available compression engines '
                                   '(%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines
                                  if e.available()),
                           name='compengine', fmt='%s', sep=', '))
    wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
    fm.write('compenginesserver', _('checking available compression engines '
                                    'for wire protocol (%s)\n'),
             fm.formatlist([e.name() for e in wirecompengines
                            if e.wireprotosupport()],
                           name='compengine', fmt='%s', sep=', '))
    re2 = 'missing'
    if util._re2:
        re2 = 'available'
    fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatepaths()
    fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
    fm.condwrite(not p, '', _(" no template directories found\n"))
    if p:
        m = templater.templatepath("map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                p = None
            fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
        else:
            p = None
        fm.condwrite(p, 'defaulttemplate',
                     _("checking default template (%s)\n"), m)
        fm.condwrite(not m, 'defaulttemplatenotfound',
                     _(" template '%s' not found\n"), "default")
    if not p:
        problems += 1
    fm.condwrite(not p, '',
                 _(" (templates seem to have been installed incorrectly)\n"))

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
                 _(" No commit editor set and can't find %s in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor == 'vi' and editorbin)
    fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
                 _(" Can't find editor '%s' in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editorbin)
    if not cmdpath and editor != 'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = stringutil.forcebytestr(e)
        problems += 1

    fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
    fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
                 " (specify a username in your configuration file)\n"), err)

    fm.condwrite(not problems, '',
                 _("no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(problems, 'problems',
                 _("%d problems detected,"
                   " please check your install!\n"), problems)
    fm.end()

    return problems

@command('debugknown', [], _('REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('known'):
        raise error.Abort("known() not supported by target repository")
    flags = repo.known([bin(s) for s in ids])
    ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))

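# Example (illustrative, not in the upstream file; the id is a placeholder):
# ask a peer which of the given full-length node ids it knows about; the
# answer is printed as a string of 0s and 1s, e.g.
#   $ hg debugknown http://example.com/repo <40-hex-node>
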
@command('debuglabelcomplete', [], _('LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    debugnamecomplete(ui, repo, *args)

@command('debuglocks',
    [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
     ('W', 'force-wlock', None,
      _('free the working state lock (DANGEROUS)')),
     ('s', 'set-lock', None, _('set the store lock until stopped')),
     ('S', 'set-wlock', None,
      _('set the working state lock until stopped'))],
    _('[OPTION]...'))
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    if opts.get(r'force_lock'):
        repo.svfs.unlink('lock')
    if opts.get(r'force_wlock'):
        repo.vfs.unlink('wlock')
    if opts.get(r'force_lock') or opts.get(r'force_wlock'):
        return 0

    locks = []
    try:
        if opts.get(r'set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_('wlock is already held'))
        if opts.get(r'set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_('lock is already held'))
        if len(locks):
            ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if ":" in locker:
                    host, pid = locker.split(':')
                    if host == socket.gethostname():
                        locker = 'user %s, process %s' % (user, pid)
                    else:
                        locker = 'user %s, process %s, host %s' \
                                 % (user, pid, host)
                ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
                return 1
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise

        ui.write(("%-6s free\n") % (name + ":"))
        return 0

    held += report(repo.svfs, "lock", repo.lock)
    held += report(repo.vfs, "wlock", repo.wlock)

    return held

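# Example (illustrative, not in the upstream file): report who holds the
# store and working-copy locks, or hold one until interrupted to exercise
# other clients against a locked repository:
#   $ hg debuglocks
#   $ hg debuglocks --set-wlock
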
@command('debugmergestate', [], '')
def debugmergestate(ui, repo, *args):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""
    def _hashornull(h):
        if h == nullhex:
            return 'null'
        else:
            return h

    def printrecords(version):
        ui.write(('* version %d records\n') % version)
        if version == 1:
            records = v1records
        else:
            records = v2records

        for rtype, record in records:
            # pretty print some record types
            if rtype == 'L':
                ui.write(('local: %s\n') % record)
            elif rtype == 'O':
                ui.write(('other: %s\n') % record)
            elif rtype == 'm':
                driver, mdstate = record.split('\0', 1)
                ui.write(('merge driver: %s (state "%s")\n')
                         % (driver, mdstate))
            elif rtype in 'FDC':
                r = record.split('\0')
                f, state, hash, lfile, afile, anode, ofile = r[0:7]
                if version == 1:
                    onode = 'not stored in v1 format'
                    flags = r[7]
                else:
                    onode, flags = r[7:9]
                ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
                         % (f, rtype, state, _hashornull(hash)))
                ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
                ui.write((' ancestor path: %s (node %s)\n')
                         % (afile, _hashornull(anode)))
                ui.write((' other path: %s (node %s)\n')
                         % (ofile, _hashornull(onode)))
            elif rtype == 'f':
                filename, rawextras = record.split('\0', 1)
                extras = rawextras.split('\0')
                i = 0
                extrastrings = []
                while i < len(extras):
                    extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
                    i += 2

                ui.write(('file extras: %s (%s)\n')
                         % (filename, ', '.join(extrastrings)))
            elif rtype == 'l':
                labels = record.split('\0', 2)
                labels = [l for l in labels if len(l) > 0]
                ui.write(('labels:\n'))
                ui.write((' local: %s\n' % labels[0]))
                ui.write((' other: %s\n' % labels[1]))
                if len(labels) > 2:
                    ui.write((' base: %s\n' % labels[2]))
            else:
                ui.write(('unrecognized entry: %s\t%s\n')
                         % (rtype, record.replace('\0', '\t')))

    # Avoid mergestate.read() since it may raise an exception for unsupported
    # merge state records. We shouldn't be doing this, but this is OK since this
    # command is pretty low-level.
    ms = mergemod.mergestate(repo)

    # sort so that reasonable information is on top
    v1records = ms._readrecordsv1()
    v2records = ms._readrecordsv2()
    order = 'LOml'
    def key(r):
        idx = order.find(r[0])
        if idx == -1:
            return (1, r[1])
        else:
            return (0, idx)
    v1records.sort(key=key)
    v2records.sort(key=key)

    if not v1records and not v2records:
        ui.write(('no merge state found\n'))
    elif not v2records:
        ui.note(('no version 2 merge state\n'))
        printrecords(1)
    elif ms._v1v2match(v1records, v2records):
        ui.note(('v1 and v2 states match: using v2\n'))
        printrecords(2)
    else:
        ui.note(('v1 and v2 states mismatch: using v1\n'))
        printrecords(1)
        if ui.verbose:
            printrecords(2)

@command('debugnamecomplete', [], _('NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    names = set()
    # since we previously only listed open branches, we will handle that
    # specially (after this for loop)
    for name, ns in repo.names.iteritems():
        if name != 'branches':
            names.update(ns.listnames(repo))
    names.update(tag for (tag, heads, tip, closed)
                 in repo.branchmap().iterbranches() if not closed)
    completions = set()
    if not args:
        args = ['']
    for a in args:
        completions.update(n for n in names if n.startswith(a))
    ui.write('\n'.join(sorted(completions)))
    ui.write('\n')

@command('debugobsolete',
    [('', 'flags', 0, _('markers flag')),
     ('', 'record-parents', False,
      _('record parent information for the precursor')),
     ('r', 'rev', [], _('display markers relevant to REV')),
     ('', 'exclusive', False, _('restrict display to markers only '
                                'relevant to REV')),
     ('', 'index', False, _('display index of the marker')),
     ('', 'delete', [], _('delete markers specified by indices')),
    ] + cmdutil.commitopts2 + cmdutil.formatteropts,
    _('[OBSOLETED [REPLACEMENT ...]]'))
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.Abort('changeset references must be full hexadecimal '
                              'node identifiers')

    if opts.get('delete'):
        indices = []
        for v in opts.get('delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.Abort(_('invalid index value: %r') % v,
                                  hint=_('use integers for indices'))

        if repo.currenttransaction():
            raise error.Abort(_('cannot delete obsmarkers in the middle '
                                'of transaction.'))

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_('deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        if opts['rev']:
            raise error.Abort('cannot select revision when creating marker')
        metadata = {}
        metadata['user'] = opts['user'] or ui.username()
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction('debugobsolete')
            try:
                date = opts.get('date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts['record_parents']:
                    if prec not in repo.unfiltered():
                        raise error.Abort('cannot use --record-parents on '
                                          'unknown changesets')
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(tr, prec, succs, opts['flags'],
                                     parents=parents, date=date,
                                     metadata=metadata, ui=ui)
                tr.close()
            except ValueError as exc:
                raise error.Abort(_('bad obsmarker input: %s') %
                                  pycompat.bytestr(exc))
            finally:
                tr.release()
        finally:
            l.release()
    else:
        if opts['rev']:
            revs = scmutil.revrange(repo, opts['rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(obsutil.getmarkers(repo, nodes=nodes,
                                              exclusive=opts['exclusive']))
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get('rev') and opts.get('index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter('debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get('index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()

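# Example (illustrative, not in the upstream file; node ids are placeholders):
# list all markers, or record that one changeset was superseded by another:
#   $ hg debugobsolete
#   $ hg debugobsolete <40-hex-precursor> <40-hex-successor>
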
@command('debugpathcomplete',
    [('f', 'full', None, _('complete an entire path')),
     ('n', 'normal', None, _('show only normal files')),
     ('a', 'added', None, _('show only added files')),
     ('r', 'removed', None, _('show only removed files'))],
    _('FILESPEC...'))
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += '/'
        spec = spec[len(rootdir):]
        fixpaths = pycompat.ossep != '/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, '/')
        speclen = len(spec)
        fullpaths = opts[r'full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.iteritems():
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace('/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    acceptable = ''
    if opts[r'normal']:
        acceptable += 'nm'
    if opts[r'added']:
        acceptable += 'a'
    if opts[r'removed']:
        acceptable += 'r'
    cwd = repo.getcwd()
    if not specs:
        specs = ['.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or 'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write('\n')

@command('debugpeer', [], _('PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    overrides = {
        ('devel', 'debug.peer-request'): True,
    }

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

        local = peer.local() is not None
        canpush = peer.canpush()

        ui.write(_('url: %s\n') % peer.url())
        ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
        ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))

1745 @command('debugpickmergetool',
1747 @command('debugpickmergetool',
1746 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1748 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1747 ('', 'changedelete', None, _('emulate merging change and delete')),
1749 ('', 'changedelete', None, _('emulate merging change and delete')),
1748 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1750 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1749 _('[PATTERN]...'),
1751 _('[PATTERN]...'),
1750 inferrepo=True)
1752 inferrepo=True)
1751 def debugpickmergetool(ui, repo, *pats, **opts):
1753 def debugpickmergetool(ui, repo, *pats, **opts):
1752 """examine which merge tool is chosen for specified file
1754 """examine which merge tool is chosen for specified file
1753
1755
1754 As described in :hg:`help merge-tools`, Mercurial examines the
1756 As described in :hg:`help merge-tools`, Mercurial examines the
1755 configurations below in this order to decide which merge tool is
1757 configurations below in this order to decide which merge tool is
1756 chosen for the specified file.
1758 chosen for the specified file.
1757
1759
1758 1. ``--tool`` option
1760 1. ``--tool`` option
1759 2. ``HGMERGE`` environment variable
1761 2. ``HGMERGE`` environment variable
1760 3. configurations in ``merge-patterns`` section
1762 3. configurations in ``merge-patterns`` section
1761 4. configuration of ``ui.merge``
1763 4. configuration of ``ui.merge``
1762 5. configurations in ``merge-tools`` section
1764 5. configurations in ``merge-tools`` section
1763 6. ``hgmerge`` tool (for historical reasons only)
1765 6. ``hgmerge`` tool (for historical reasons only)
1764 7. default tool for fallback (``:merge`` or ``:prompt``)
1766 7. default tool for fallback (``:merge`` or ``:prompt``)
1765
1767
1766 This command writes out the examination result in the style below::
1768 This command writes out the examination result in the style below::
1767
1769
1768 FILE = MERGETOOL
1770 FILE = MERGETOOL
1769
1771
1770 By default, all files known in the first parent context of the
1772 By default, all files known in the first parent context of the
1771 working directory are examined. Use file patterns and/or -I/-X
1773 working directory are examined. Use file patterns and/or -I/-X
1772 options to limit target files. -r/--rev is also useful to examine
1774 options to limit target files. -r/--rev is also useful to examine
1773 files in another context without actually updating to it.
1775 files in another context without actually updating to it.
1774
1776
1775 With --debug, this command also shows warning messages while
1777 With --debug, this command also shows warning messages while
1776 matching against ``merge-patterns`` and so on. It is recommended to
1778 matching against ``merge-patterns`` and so on. It is recommended to
1777 use this option with explicit file patterns and/or -I/-X options,
1779 use this option with explicit file patterns and/or -I/-X options,
1778 because this option increases the amount of output per file
1780 because this option increases the amount of output per file
1779 according to configurations in hgrc.
1781 according to configurations in hgrc.
1780
1782
1781 With -v/--verbose, this command first shows the configurations
1783 With -v/--verbose, this command first shows the configurations
1782 below (only if specified).
1784 below (only if specified).
1783
1785
1784 - ``--tool`` option
1786 - ``--tool`` option
1785 - ``HGMERGE`` environment variable
1787 - ``HGMERGE`` environment variable
1786 - configuration of ``ui.merge``
1788 - configuration of ``ui.merge``
1787
1789
1788 If a merge tool is chosen before matching against
1790 If a merge tool is chosen before matching against
1789 ``merge-patterns``, this command can't show any helpful
1791 ``merge-patterns``, this command can't show any helpful
1790 information, even with --debug. In such a case, the information
1792 information, even with --debug. In such a case, the information
1791 above is useful for knowing why a merge tool was chosen.
1793 above is useful for knowing why a merge tool was chosen.
1792 """
1794 """
1793 opts = pycompat.byteskwargs(opts)
1795 opts = pycompat.byteskwargs(opts)
1794 overrides = {}
1796 overrides = {}
1795 if opts['tool']:
1797 if opts['tool']:
1796 overrides[('ui', 'forcemerge')] = opts['tool']
1798 overrides[('ui', 'forcemerge')] = opts['tool']
1797 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1799 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1798
1800
1799 with ui.configoverride(overrides, 'debugmergepatterns'):
1801 with ui.configoverride(overrides, 'debugmergepatterns'):
1800 hgmerge = encoding.environ.get("HGMERGE")
1802 hgmerge = encoding.environ.get("HGMERGE")
1801 if hgmerge is not None:
1803 if hgmerge is not None:
1802 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1804 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1803 uimerge = ui.config("ui", "merge")
1805 uimerge = ui.config("ui", "merge")
1804 if uimerge:
1806 if uimerge:
1805 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1807 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1806
1808
1807 ctx = scmutil.revsingle(repo, opts.get('rev'))
1809 ctx = scmutil.revsingle(repo, opts.get('rev'))
1808 m = scmutil.match(ctx, pats, opts)
1810 m = scmutil.match(ctx, pats, opts)
1809 changedelete = opts['changedelete']
1811 changedelete = opts['changedelete']
1810 for path in ctx.walk(m):
1812 for path in ctx.walk(m):
1811 fctx = ctx[path]
1813 fctx = ctx[path]
1812 try:
1814 try:
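# buffer (and discard) messages emitted by _picktool so they are only shown with --debug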
1813 if not ui.debugflag:
1815 if not ui.debugflag:
1814 ui.pushbuffer(error=True)
1816 ui.pushbuffer(error=True)
1815 tool, toolpath = filemerge._picktool(repo, ui, path,
1817 tool, toolpath = filemerge._picktool(repo, ui, path,
1816 fctx.isbinary(),
1818 fctx.isbinary(),
1817 'l' in fctx.flags(),
1819 'l' in fctx.flags(),
1818 changedelete)
1820 changedelete)
1819 finally:
1821 finally:
1820 if not ui.debugflag:
1822 if not ui.debugflag:
1821 ui.popbuffer()
1823 ui.popbuffer()
1822 ui.write(('%s = %s\n') % (path, tool))
1824 ui.write(('%s = %s\n') % (path, tool))
1823
1825
1824 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1826 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1825 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1827 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1826 '''access the pushkey key/value protocol
1828 '''access the pushkey key/value protocol
1827
1829
1828 With two args, list the keys in the given namespace.
1830 With two args, list the keys in the given namespace.
1829
1831
1830 With five args, set a key to new if it currently is set to old.
1832 With five args, set a key to new if it currently is set to old.
1831 Reports success or failure.
1833 Reports success or failure.
1832 '''
1834 '''
1833
1835
1834 target = hg.peer(ui, {}, repopath)
1836 target = hg.peer(ui, {}, repopath)
1835 if keyinfo:
1837 if keyinfo:
1836 key, old, new = keyinfo
1838 key, old, new = keyinfo
1837 with target.commandexecutor() as e:
1839 with target.commandexecutor() as e:
1838 r = e.callcommand('pushkey', {
1840 r = e.callcommand('pushkey', {
1839 'namespace': namespace,
1841 'namespace': namespace,
1840 'key': key,
1842 'key': key,
1841 'old': old,
1843 'old': old,
1842 'new': new,
1844 'new': new,
1843 }).result()
1845 }).result()
1844
1846
1845 ui.status(pycompat.bytestr(r) + '\n')
1847 ui.status(pycompat.bytestr(r) + '\n')
1846 return not r
1848 return not r
1847 else:
1849 else:
1848 for k, v in sorted(target.listkeys(namespace).iteritems()):
1850 for k, v in sorted(target.listkeys(namespace).iteritems()):
1849 ui.write("%s\t%s\n" % (stringutil.escapestr(k),
1851 ui.write("%s\t%s\n" % (stringutil.escapestr(k),
1850 stringutil.escapestr(v)))
1852 stringutil.escapestr(v)))
1851
1853
1852 @command('debugpvec', [], _('A B'))
1854 @command('debugpvec', [], _('A B'))
1853 def debugpvec(ui, repo, a, b=None):
1855 def debugpvec(ui, repo, a, b=None):
1854 ca = scmutil.revsingle(repo, a)
1856 ca = scmutil.revsingle(repo, a)
1855 cb = scmutil.revsingle(repo, b)
1857 cb = scmutil.revsingle(repo, b)
1856 pa = pvec.ctxpvec(ca)
1858 pa = pvec.ctxpvec(ca)
1857 pb = pvec.ctxpvec(cb)
1859 pb = pvec.ctxpvec(cb)
1858 if pa == pb:
1860 if pa == pb:
1859 rel = "="
1861 rel = "="
1860 elif pa > pb:
1862 elif pa > pb:
1861 rel = ">"
1863 rel = ">"
1862 elif pa < pb:
1864 elif pa < pb:
1863 rel = "<"
1865 rel = "<"
1864 elif pa | pb:
1866 elif pa | pb:
1865 rel = "|"
1867 rel = "|"
1866 ui.write(_("a: %s\n") % pa)
1868 ui.write(_("a: %s\n") % pa)
1867 ui.write(_("b: %s\n") % pb)
1869 ui.write(_("b: %s\n") % pb)
1868 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1870 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1869 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1871 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1870 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1872 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1871 pa.distance(pb), rel))
1873 pa.distance(pb), rel))
1872
1874
1873 @command('debugrebuilddirstate|debugrebuildstate',
1875 @command('debugrebuilddirstate|debugrebuildstate',
1874 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1876 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1875 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1877 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1876 'the working copy parent')),
1878 'the working copy parent')),
1877 ],
1879 ],
1878 _('[-r REV]'))
1880 _('[-r REV]'))
1879 def debugrebuilddirstate(ui, repo, rev, **opts):
1881 def debugrebuilddirstate(ui, repo, rev, **opts):
1880 """rebuild the dirstate as it would look like for the given revision
1882 """rebuild the dirstate as it would look like for the given revision
1881
1883
1882 If no revision is specified, the first current parent will be used.
1884 If no revision is specified, the first current parent will be used.
1883
1885
1884 The dirstate will be set to the files of the given revision.
1886 The dirstate will be set to the files of the given revision.
1885 The actual working directory content or existing dirstate
1887 The actual working directory content or existing dirstate
1886 information such as adds or removes is not considered.
1888 information such as adds or removes is not considered.
1887
1889
1888 ``minimal`` will only rebuild the dirstate status for files that claim to be
1890 ``minimal`` will only rebuild the dirstate status for files that claim to be
1889 tracked but are not in the parent manifest, or that exist in the parent
1891 tracked but are not in the parent manifest, or that exist in the parent
1890 manifest but are not in the dirstate. It will not change adds, removes, or
1892 manifest but are not in the dirstate. It will not change adds, removes, or
1891 modified files that are in the working copy parent.
1893 modified files that are in the working copy parent.
1892
1894
1893 One use of this command is to make the next :hg:`status` invocation
1895 One use of this command is to make the next :hg:`status` invocation
1894 check the actual file content.
1896 check the actual file content.
1895 """
1897 """
1896 ctx = scmutil.revsingle(repo, rev)
1898 ctx = scmutil.revsingle(repo, rev)
1897 with repo.wlock():
1899 with repo.wlock():
1898 dirstate = repo.dirstate
1900 dirstate = repo.dirstate
1899 changedfiles = None
1901 changedfiles = None
1900 # See command doc for what minimal does.
1902 # See command doc for what minimal does.
1901 if opts.get(r'minimal'):
1903 if opts.get(r'minimal'):
1902 manifestfiles = set(ctx.manifest().keys())
1904 manifestfiles = set(ctx.manifest().keys())
1903 dirstatefiles = set(dirstate)
1905 dirstatefiles = set(dirstate)
1904 manifestonly = manifestfiles - dirstatefiles
1906 manifestonly = manifestfiles - dirstatefiles
1905 dsonly = dirstatefiles - manifestfiles
1907 dsonly = dirstatefiles - manifestfiles
1906 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1908 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1907 changedfiles = manifestonly | dsnotadded
1909 changedfiles = manifestonly | dsnotadded
1908
1910
1909 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1911 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1910
1912
1911 @command('debugrebuildfncache', [], '')
1913 @command('debugrebuildfncache', [], '')
1912 def debugrebuildfncache(ui, repo):
1914 def debugrebuildfncache(ui, repo):
1913 """rebuild the fncache file"""
1915 """rebuild the fncache file"""
1914 repair.rebuildfncache(ui, repo)
1916 repair.rebuildfncache(ui, repo)
1915
1917
1916 @command('debugrename',
1918 @command('debugrename',
1917 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1919 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1918 _('[-r REV] FILE'))
1920 _('[-r REV] FILE'))
1919 def debugrename(ui, repo, file1, *pats, **opts):
1921 def debugrename(ui, repo, file1, *pats, **opts):
1920 """dump rename information"""
1922 """dump rename information"""
1921
1923
1922 opts = pycompat.byteskwargs(opts)
1924 opts = pycompat.byteskwargs(opts)
1923 ctx = scmutil.revsingle(repo, opts.get('rev'))
1925 ctx = scmutil.revsingle(repo, opts.get('rev'))
1924 m = scmutil.match(ctx, (file1,) + pats, opts)
1926 m = scmutil.match(ctx, (file1,) + pats, opts)
1925 for abs in ctx.walk(m):
1927 for abs in ctx.walk(m):
1926 fctx = ctx[abs]
1928 fctx = ctx[abs]
1927 o = fctx.filelog().renamed(fctx.filenode())
1929 o = fctx.filelog().renamed(fctx.filenode())
1928 rel = m.rel(abs)
1930 rel = m.rel(abs)
1929 if o:
1931 if o:
1930 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1932 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1931 else:
1933 else:
1932 ui.write(_("%s not renamed\n") % rel)
1934 ui.write(_("%s not renamed\n") % rel)
1933
1935
1934 @command('debugrevlog', cmdutil.debugrevlogopts +
1936 @command('debugrevlog', cmdutil.debugrevlogopts +
1935 [('d', 'dump', False, _('dump index data'))],
1937 [('d', 'dump', False, _('dump index data'))],
1936 _('-c|-m|FILE'),
1938 _('-c|-m|FILE'),
1937 optionalrepo=True)
1939 optionalrepo=True)
1938 def debugrevlog(ui, repo, file_=None, **opts):
1940 def debugrevlog(ui, repo, file_=None, **opts):
1939 """show data and statistics about a revlog"""
1941 """show data and statistics about a revlog"""
1940 opts = pycompat.byteskwargs(opts)
1942 opts = pycompat.byteskwargs(opts)
1941 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1943 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1942
1944
1943 if opts.get("dump"):
1945 if opts.get("dump"):
1944 numrevs = len(r)
1946 numrevs = len(r)
1945 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1947 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1946 " rawsize totalsize compression heads chainlen\n"))
1948 " rawsize totalsize compression heads chainlen\n"))
1947 ts = 0
1949 ts = 0
1948 heads = set()
1950 heads = set()
1949
1951
1950 for rev in xrange(numrevs):
1952 for rev in xrange(numrevs):
1951 dbase = r.deltaparent(rev)
1953 dbase = r.deltaparent(rev)
1952 if dbase == -1:
1954 if dbase == -1:
1953 dbase = rev
1955 dbase = rev
1954 cbase = r.chainbase(rev)
1956 cbase = r.chainbase(rev)
1955 clen = r.chainlen(rev)
1957 clen = r.chainlen(rev)
1956 p1, p2 = r.parentrevs(rev)
1958 p1, p2 = r.parentrevs(rev)
1957 rs = r.rawsize(rev)
1959 rs = r.rawsize(rev)
1958 ts = ts + rs
1960 ts = ts + rs
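# maintain a running set of heads: this revision's parents stop being heads and the revision itself becomes one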
1959 heads -= set(r.parentrevs(rev))
1961 heads -= set(r.parentrevs(rev))
1960 heads.add(rev)
1962 heads.add(rev)
1961 try:
1963 try:
1962 compression = ts / r.end(rev)
1964 compression = ts / r.end(rev)
1963 except ZeroDivisionError:
1965 except ZeroDivisionError:
1964 compression = 0
1966 compression = 0
1965 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1967 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1966 "%11d %5d %8d\n" %
1968 "%11d %5d %8d\n" %
1967 (rev, p1, p2, r.start(rev), r.end(rev),
1969 (rev, p1, p2, r.start(rev), r.end(rev),
1968 r.start(dbase), r.start(cbase),
1970 r.start(dbase), r.start(cbase),
1969 r.start(p1), r.start(p2),
1971 r.start(p1), r.start(p2),
1970 rs, ts, compression, len(heads), clen))
1972 rs, ts, compression, len(heads), clen))
1971 return 0
1973 return 0
1972
1974
1973 v = r.version
1975 v = r.version
1974 format = v & 0xFFFF
1976 format = v & 0xFFFF
1975 flags = []
1977 flags = []
1976 gdelta = False
1978 gdelta = False
1977 if v & revlog.FLAG_INLINE_DATA:
1979 if v & revlog.FLAG_INLINE_DATA:
1978 flags.append('inline')
1980 flags.append('inline')
1979 if v & revlog.FLAG_GENERALDELTA:
1981 if v & revlog.FLAG_GENERALDELTA:
1980 gdelta = True
1982 gdelta = True
1981 flags.append('generaldelta')
1983 flags.append('generaldelta')
1982 if not flags:
1984 if not flags:
1983 flags = ['(none)']
1985 flags = ['(none)']
1984
1986
1985 nummerges = 0
1987 nummerges = 0
1986 numfull = 0
1988 numfull = 0
1987 numprev = 0
1989 numprev = 0
1988 nump1 = 0
1990 nump1 = 0
1989 nump2 = 0
1991 nump2 = 0
1990 numother = 0
1992 numother = 0
1991 nump1prev = 0
1993 nump1prev = 0
1992 nump2prev = 0
1994 nump2prev = 0
1993 chainlengths = []
1995 chainlengths = []
1994 chainbases = []
1996 chainbases = []
1995 chainspans = []
1997 chainspans = []
1996
1998
1997 datasize = [None, 0, 0]
1999 datasize = [None, 0, 0]
1998 fullsize = [None, 0, 0]
2000 fullsize = [None, 0, 0]
1999 deltasize = [None, 0, 0]
2001 deltasize = [None, 0, 0]
2000 chunktypecounts = {}
2002 chunktypecounts = {}
2001 chunktypesizes = {}
2003 chunktypesizes = {}
2002
2004
2003 def addsize(size, l):
2005 def addsize(size, l):
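# l is a running [min, max, total] accumulator for the sizes seen so far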
2004 if l[0] is None or size < l[0]:
2006 if l[0] is None or size < l[0]:
2005 l[0] = size
2007 l[0] = size
2006 if size > l[1]:
2008 if size > l[1]:
2007 l[1] = size
2009 l[1] = size
2008 l[2] += size
2010 l[2] += size
2009
2011
2010 numrevs = len(r)
2012 numrevs = len(r)
2011 for rev in xrange(numrevs):
2013 for rev in xrange(numrevs):
2012 p1, p2 = r.parentrevs(rev)
2014 p1, p2 = r.parentrevs(rev)
2013 delta = r.deltaparent(rev)
2015 delta = r.deltaparent(rev)
2014 if format > 0:
2016 if format > 0:
2015 addsize(r.rawsize(rev), datasize)
2017 addsize(r.rawsize(rev), datasize)
2016 if p2 != nullrev:
2018 if p2 != nullrev:
2017 nummerges += 1
2019 nummerges += 1
2018 size = r.length(rev)
2020 size = r.length(rev)
2019 if delta == nullrev:
2021 if delta == nullrev:
2020 chainlengths.append(0)
2022 chainlengths.append(0)
2021 chainbases.append(r.start(rev))
2023 chainbases.append(r.start(rev))
2022 chainspans.append(size)
2024 chainspans.append(size)
2023 numfull += 1
2025 numfull += 1
2024 addsize(size, fullsize)
2026 addsize(size, fullsize)
2025 else:
2027 else:
2026 chainlengths.append(chainlengths[delta] + 1)
2028 chainlengths.append(chainlengths[delta] + 1)
2027 baseaddr = chainbases[delta]
2029 baseaddr = chainbases[delta]
2028 revaddr = r.start(rev)
2030 revaddr = r.start(rev)
2029 chainbases.append(baseaddr)
2031 chainbases.append(baseaddr)
2030 chainspans.append((revaddr - baseaddr) + size)
2032 chainspans.append((revaddr - baseaddr) + size)
2031 addsize(size, deltasize)
2033 addsize(size, deltasize)
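# classify the delta base: the previous revision, p1, p2, or some other revision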
2032 if delta == rev - 1:
2034 if delta == rev - 1:
2033 numprev += 1
2035 numprev += 1
2034 if delta == p1:
2036 if delta == p1:
2035 nump1prev += 1
2037 nump1prev += 1
2036 elif delta == p2:
2038 elif delta == p2:
2037 nump2prev += 1
2039 nump2prev += 1
2038 elif delta == p1:
2040 elif delta == p1:
2039 nump1 += 1
2041 nump1 += 1
2040 elif delta == p2:
2042 elif delta == p2:
2041 nump2 += 1
2043 nump2 += 1
2042 elif delta != nullrev:
2044 elif delta != nullrev:
2043 numother += 1
2045 numother += 1
2044
2046
2045 # Obtain data on the raw chunks in the revlog.
2047 # Obtain data on the raw chunks in the revlog.
2046 segment = r._getsegmentforrevs(rev, rev)[1]
2048 segment = r._getsegmentforrevs(rev, rev)[1]
2047 if segment:
2049 if segment:
2048 chunktype = bytes(segment[0:1])
2050 chunktype = bytes(segment[0:1])
2049 else:
2051 else:
2050 chunktype = 'empty'
2052 chunktype = 'empty'
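# the first byte of a chunk indicates its compression (e.g. 'u' for uncompressed, 'x' for zlib)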
2051
2053
2052 if chunktype not in chunktypecounts:
2054 if chunktype not in chunktypecounts:
2053 chunktypecounts[chunktype] = 0
2055 chunktypecounts[chunktype] = 0
2054 chunktypesizes[chunktype] = 0
2056 chunktypesizes[chunktype] = 0
2055
2057
2056 chunktypecounts[chunktype] += 1
2058 chunktypecounts[chunktype] += 1
2057 chunktypesizes[chunktype] += size
2059 chunktypesizes[chunktype] += size
2058
2060
2059 # Adjust size min value for empty cases
2061 # Adjust size min value for empty cases
2060 for size in (datasize, fullsize, deltasize):
2062 for size in (datasize, fullsize, deltasize):
2061 if size[0] is None:
2063 if size[0] is None:
2062 size[0] = 0
2064 size[0] = 0
2063
2065
2064 numdeltas = numrevs - numfull
2066 numdeltas = numrevs - numfull
2065 numoprev = numprev - nump1prev - nump2prev
2067 numoprev = numprev - nump1prev - nump2prev
2066 totalrawsize = datasize[2]
2068 totalrawsize = datasize[2]
2067 datasize[2] /= numrevs
2069 datasize[2] /= numrevs
2068 fulltotal = fullsize[2]
2070 fulltotal = fullsize[2]
2069 fullsize[2] /= numfull
2071 fullsize[2] /= numfull
2070 deltatotal = deltasize[2]
2072 deltatotal = deltasize[2]
2071 if numrevs - numfull > 0:
2073 if numrevs - numfull > 0:
2072 deltasize[2] /= numrevs - numfull
2074 deltasize[2] /= numrevs - numfull
2073 totalsize = fulltotal + deltatotal
2075 totalsize = fulltotal + deltatotal
2074 avgchainlen = sum(chainlengths) / numrevs
2076 avgchainlen = sum(chainlengths) / numrevs
2075 maxchainlen = max(chainlengths)
2077 maxchainlen = max(chainlengths)
2076 maxchainspan = max(chainspans)
2078 maxchainspan = max(chainspans)
2077 compratio = 1
2079 compratio = 1
2078 if totalsize:
2080 if totalsize:
2079 compratio = totalrawsize / totalsize
2081 compratio = totalrawsize / totalsize
2080
2082
2081 basedfmtstr = '%%%dd\n'
2083 basedfmtstr = '%%%dd\n'
2082 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2084 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2083
2085
2084 def dfmtstr(max):
2086 def dfmtstr(max):
2085 return basedfmtstr % len(str(max))
2087 return basedfmtstr % len(str(max))
2086 def pcfmtstr(max, padding=0):
2088 def pcfmtstr(max, padding=0):
2087 return basepcfmtstr % (len(str(max)), ' ' * padding)
2089 return basepcfmtstr % (len(str(max)), ' ' * padding)
2088
2090
2089 def pcfmt(value, total):
2091 def pcfmt(value, total):
2090 if total:
2092 if total:
2091 return (value, 100 * float(value) / total)
2093 return (value, 100 * float(value) / total)
2092 else:
2094 else:
2093 return value, 100.0
2095 return value, 100.0
2094
2096
2095 ui.write(('format : %d\n') % format)
2097 ui.write(('format : %d\n') % format)
2096 ui.write(('flags : %s\n') % ', '.join(flags))
2098 ui.write(('flags : %s\n') % ', '.join(flags))
2097
2099
2098 ui.write('\n')
2100 ui.write('\n')
2099 fmt = pcfmtstr(totalsize)
2101 fmt = pcfmtstr(totalsize)
2100 fmt2 = dfmtstr(totalsize)
2102 fmt2 = dfmtstr(totalsize)
2101 ui.write(('revisions : ') + fmt2 % numrevs)
2103 ui.write(('revisions : ') + fmt2 % numrevs)
2102 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2104 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2103 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2105 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2104 ui.write(('revisions : ') + fmt2 % numrevs)
2106 ui.write(('revisions : ') + fmt2 % numrevs)
2105 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
2107 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
2106 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2108 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2107 ui.write(('revision size : ') + fmt2 % totalsize)
2109 ui.write(('revision size : ') + fmt2 % totalsize)
2108 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
2110 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
2109 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2111 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2110
2112
2111 def fmtchunktype(chunktype):
2113 def fmtchunktype(chunktype):
2112 if chunktype == 'empty':
2114 if chunktype == 'empty':
2113 return ' %s : ' % chunktype
2115 return ' %s : ' % chunktype
2114 elif chunktype in pycompat.bytestr(string.ascii_letters):
2116 elif chunktype in pycompat.bytestr(string.ascii_letters):
2115 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2117 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2116 else:
2118 else:
2117 return ' 0x%s : ' % hex(chunktype)
2119 return ' 0x%s : ' % hex(chunktype)
2118
2120
2119 ui.write('\n')
2121 ui.write('\n')
2120 ui.write(('chunks : ') + fmt2 % numrevs)
2122 ui.write(('chunks : ') + fmt2 % numrevs)
2121 for chunktype in sorted(chunktypecounts):
2123 for chunktype in sorted(chunktypecounts):
2122 ui.write(fmtchunktype(chunktype))
2124 ui.write(fmtchunktype(chunktype))
2123 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2125 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2124 ui.write(('chunks size : ') + fmt2 % totalsize)
2126 ui.write(('chunks size : ') + fmt2 % totalsize)
2125 for chunktype in sorted(chunktypecounts):
2127 for chunktype in sorted(chunktypecounts):
2126 ui.write(fmtchunktype(chunktype))
2128 ui.write(fmtchunktype(chunktype))
2127 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2129 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2128
2130
2129 ui.write('\n')
2131 ui.write('\n')
2130 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2132 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2131 ui.write(('avg chain length : ') + fmt % avgchainlen)
2133 ui.write(('avg chain length : ') + fmt % avgchainlen)
2132 ui.write(('max chain length : ') + fmt % maxchainlen)
2134 ui.write(('max chain length : ') + fmt % maxchainlen)
2133 ui.write(('max chain reach : ') + fmt % maxchainspan)
2135 ui.write(('max chain reach : ') + fmt % maxchainspan)
2134 ui.write(('compression ratio : ') + fmt % compratio)
2136 ui.write(('compression ratio : ') + fmt % compratio)
2135
2137
2136 if format > 0:
2138 if format > 0:
2137 ui.write('\n')
2139 ui.write('\n')
2138 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2140 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2139 % tuple(datasize))
2141 % tuple(datasize))
2140 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2142 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2141 % tuple(fullsize))
2143 % tuple(fullsize))
2142 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2144 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2143 % tuple(deltasize))
2145 % tuple(deltasize))
2144
2146
2145 if numdeltas > 0:
2147 if numdeltas > 0:
2146 ui.write('\n')
2148 ui.write('\n')
2147 fmt = pcfmtstr(numdeltas)
2149 fmt = pcfmtstr(numdeltas)
2148 fmt2 = pcfmtstr(numdeltas, 4)
2150 fmt2 = pcfmtstr(numdeltas, 4)
2149 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2151 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2150 if numprev > 0:
2152 if numprev > 0:
2151 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2153 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2152 numprev))
2154 numprev))
2153 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2155 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2154 numprev))
2156 numprev))
2155 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2157 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2156 numprev))
2158 numprev))
2157 if gdelta:
2159 if gdelta:
2158 ui.write(('deltas against p1 : ')
2160 ui.write(('deltas against p1 : ')
2159 + fmt % pcfmt(nump1, numdeltas))
2161 + fmt % pcfmt(nump1, numdeltas))
2160 ui.write(('deltas against p2 : ')
2162 ui.write(('deltas against p2 : ')
2161 + fmt % pcfmt(nump2, numdeltas))
2163 + fmt % pcfmt(nump2, numdeltas))
2162 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2164 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2163 numdeltas))
2165 numdeltas))
2164
2166
2165 @command('debugrevspec',
2167 @command('debugrevspec',
2166 [('', 'optimize', None,
2168 [('', 'optimize', None,
2167 _('print parsed tree after optimizing (DEPRECATED)')),
2169 _('print parsed tree after optimizing (DEPRECATED)')),
2168 ('', 'show-revs', True, _('print list of result revisions (default)')),
2170 ('', 'show-revs', True, _('print list of result revisions (default)')),
2169 ('s', 'show-set', None, _('print internal representation of result set')),
2171 ('s', 'show-set', None, _('print internal representation of result set')),
2170 ('p', 'show-stage', [],
2172 ('p', 'show-stage', [],
2171 _('print parsed tree at the given stage'), _('NAME')),
2173 _('print parsed tree at the given stage'), _('NAME')),
2172 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2174 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2173 ('', 'verify-optimized', False, _('verify optimized result')),
2175 ('', 'verify-optimized', False, _('verify optimized result')),
2174 ],
2176 ],
2175 ('REVSPEC'))
2177 ('REVSPEC'))
2176 def debugrevspec(ui, repo, expr, **opts):
2178 def debugrevspec(ui, repo, expr, **opts):
2177 """parse and apply a revision specification
2179 """parse and apply a revision specification
2178
2180
2179 Use the -p/--show-stage option to print the parsed tree at the given stages.
2181 Use the -p/--show-stage option to print the parsed tree at the given stages.
2180 Use -p all to print the tree at every stage.
2182 Use -p all to print the tree at every stage.
2181
2183
2182 Use the --no-show-revs option with -s or -p to print only the set
2184 Use the --no-show-revs option with -s or -p to print only the set
2183 representation or the parsed tree, respectively.
2185 representation or the parsed tree, respectively.
2184
2186
2185 Use --verify-optimized to compare the optimized result with the unoptimized
2187 Use --verify-optimized to compare the optimized result with the unoptimized
2186 one. Returns 1 if the optimized result differs.
2188 one. Returns 1 if the optimized result differs.
2187 """
2189 """
2188 opts = pycompat.byteskwargs(opts)
2190 opts = pycompat.byteskwargs(opts)
2189 aliases = ui.configitems('revsetalias')
2191 aliases = ui.configitems('revsetalias')
2190 stages = [
2192 stages = [
2191 ('parsed', lambda tree: tree),
2193 ('parsed', lambda tree: tree),
2192 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2194 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2193 ui.warn)),
2195 ui.warn)),
2194 ('concatenated', revsetlang.foldconcat),
2196 ('concatenated', revsetlang.foldconcat),
2195 ('analyzed', revsetlang.analyze),
2197 ('analyzed', revsetlang.analyze),
2196 ('optimized', revsetlang.optimize),
2198 ('optimized', revsetlang.optimize),
2197 ]
2199 ]
2198 if opts['no_optimized']:
2200 if opts['no_optimized']:
2199 stages = stages[:-1]
2201 stages = stages[:-1]
2200 if opts['verify_optimized'] and opts['no_optimized']:
2202 if opts['verify_optimized'] and opts['no_optimized']:
2201 raise error.Abort(_('cannot use --verify-optimized with '
2203 raise error.Abort(_('cannot use --verify-optimized with '
2202 '--no-optimized'))
2204 '--no-optimized'))
2203 stagenames = set(n for n, f in stages)
2205 stagenames = set(n for n, f in stages)
2204
2206
2205 showalways = set()
2207 showalways = set()
2206 showchanged = set()
2208 showchanged = set()
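# stages in showalways are always printed; stages in showchanged only when the tree actually changed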
2207 if ui.verbose and not opts['show_stage']:
2209 if ui.verbose and not opts['show_stage']:
2208 # show parsed tree by --verbose (deprecated)
2210 # show parsed tree by --verbose (deprecated)
2209 showalways.add('parsed')
2211 showalways.add('parsed')
2210 showchanged.update(['expanded', 'concatenated'])
2212 showchanged.update(['expanded', 'concatenated'])
2211 if opts['optimize']:
2213 if opts['optimize']:
2212 showalways.add('optimized')
2214 showalways.add('optimized')
2213 if opts['show_stage'] and opts['optimize']:
2215 if opts['show_stage'] and opts['optimize']:
2214 raise error.Abort(_('cannot use --optimize with --show-stage'))
2216 raise error.Abort(_('cannot use --optimize with --show-stage'))
2215 if opts['show_stage'] == ['all']:
2217 if opts['show_stage'] == ['all']:
2216 showalways.update(stagenames)
2218 showalways.update(stagenames)
2217 else:
2219 else:
2218 for n in opts['show_stage']:
2220 for n in opts['show_stage']:
2219 if n not in stagenames:
2221 if n not in stagenames:
2220 raise error.Abort(_('invalid stage name: %s') % n)
2222 raise error.Abort(_('invalid stage name: %s') % n)
2221 showalways.update(opts['show_stage'])
2223 showalways.update(opts['show_stage'])
2222
2224
2223 treebystage = {}
2225 treebystage = {}
2224 printedtree = None
2226 printedtree = None
2225 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
2227 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
2226 for n, f in stages:
2228 for n, f in stages:
2227 treebystage[n] = tree = f(tree)
2229 treebystage[n] = tree = f(tree)
2228 if n in showalways or (n in showchanged and tree != printedtree):
2230 if n in showalways or (n in showchanged and tree != printedtree):
2229 if opts['show_stage'] or n != 'parsed':
2231 if opts['show_stage'] or n != 'parsed':
2230 ui.write(("* %s:\n") % n)
2232 ui.write(("* %s:\n") % n)
2231 ui.write(revsetlang.prettyformat(tree), "\n")
2233 ui.write(revsetlang.prettyformat(tree), "\n")
2232 printedtree = tree
2234 printedtree = tree
2233
2235
2234 if opts['verify_optimized']:
2236 if opts['verify_optimized']:
2235 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2237 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2236 brevs = revset.makematcher(treebystage['optimized'])(repo)
2238 brevs = revset.makematcher(treebystage['optimized'])(repo)
2237 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2239 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2238 ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
2240 ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
2239 ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
2241 ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
2240 arevs = list(arevs)
2242 arevs = list(arevs)
2241 brevs = list(brevs)
2243 brevs = list(brevs)
2242 if arevs == brevs:
2244 if arevs == brevs:
2243 return 0
2245 return 0
2244 ui.write(('--- analyzed\n'), label='diff.file_a')
2246 ui.write(('--- analyzed\n'), label='diff.file_a')
2245 ui.write(('+++ optimized\n'), label='diff.file_b')
2247 ui.write(('+++ optimized\n'), label='diff.file_b')
2246 sm = difflib.SequenceMatcher(None, arevs, brevs)
2248 sm = difflib.SequenceMatcher(None, arevs, brevs)
2247 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2249 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2248 if tag in ('delete', 'replace'):
2250 if tag in ('delete', 'replace'):
2249 for c in arevs[alo:ahi]:
2251 for c in arevs[alo:ahi]:
2250 ui.write('-%s\n' % c, label='diff.deleted')
2252 ui.write('-%s\n' % c, label='diff.deleted')
2251 if tag in ('insert', 'replace'):
2253 if tag in ('insert', 'replace'):
2252 for c in brevs[blo:bhi]:
2254 for c in brevs[blo:bhi]:
2253 ui.write('+%s\n' % c, label='diff.inserted')
2255 ui.write('+%s\n' % c, label='diff.inserted')
2254 if tag == 'equal':
2256 if tag == 'equal':
2255 for c in arevs[alo:ahi]:
2257 for c in arevs[alo:ahi]:
2256 ui.write(' %s\n' % c)
2258 ui.write(' %s\n' % c)
2257 return 1
2259 return 1
2258
2260
2259 func = revset.makematcher(tree)
2261 func = revset.makematcher(tree)
2260 revs = func(repo)
2262 revs = func(repo)
2261 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2263 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2262 ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
2264 ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
2263 if not opts['show_revs']:
2265 if not opts['show_revs']:
2264 return
2266 return
2265 for c in revs:
2267 for c in revs:
2266 ui.write("%d\n" % c)
2268 ui.write("%d\n" % c)
2267
2269
2268 @command('debugserve', [
2270 @command('debugserve', [
2269 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2271 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2270 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2272 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2271 ('', 'logiofile', '', _('file to log server I/O to')),
2273 ('', 'logiofile', '', _('file to log server I/O to')),
2272 ], '')
2274 ], '')
2273 def debugserve(ui, repo, **opts):
2275 def debugserve(ui, repo, **opts):
2274 """run a server with advanced settings
2276 """run a server with advanced settings
2275
2277
2276 This command is similar to :hg:`serve`. It exists partially as a
2278 This command is similar to :hg:`serve`. It exists partially as a
2277 workaround for the fact that ``hg serve --stdio`` must have specific
2279 workaround for the fact that ``hg serve --stdio`` must have specific
2278 arguments for security reasons.
2280 arguments for security reasons.
2279 """
2281 """
2280 opts = pycompat.byteskwargs(opts)
2282 opts = pycompat.byteskwargs(opts)
2281
2283
2282 if not opts['sshstdio']:
2284 if not opts['sshstdio']:
2283 raise error.Abort(_('only --sshstdio is currently supported'))
2285 raise error.Abort(_('only --sshstdio is currently supported'))
2284
2286
2285 logfh = None
2287 logfh = None
2286
2288
2287 if opts['logiofd'] and opts['logiofile']:
2289 if opts['logiofd'] and opts['logiofile']:
2288 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2290 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2289
2291
2290 if opts['logiofd']:
2292 if opts['logiofd']:
2291 # Line buffered because output is line based.
2293 # Line buffered because output is line based.
2292 try:
2294 try:
2293 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2295 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2294 except OSError as e:
2296 except OSError as e:
2295 if e.errno != errno.ESPIPE:
2297 if e.errno != errno.ESPIPE:
2296 raise
2298 raise
2297 # can't seek a pipe, so `ab` mode fails on py3
2299 # can't seek a pipe, so `ab` mode fails on py3
2298 logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
2300 logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
2299 elif opts['logiofile']:
2301 elif opts['logiofile']:
2300 logfh = open(opts['logiofile'], 'ab', 1)
2302 logfh = open(opts['logiofile'], 'ab', 1)
2301
2303
2302 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2304 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2303 s.serve_forever()
2305 s.serve_forever()
2304
2306
2305 @command('debugsetparents', [], _('REV1 [REV2]'))
2307 @command('debugsetparents', [], _('REV1 [REV2]'))
2306 def debugsetparents(ui, repo, rev1, rev2=None):
2308 def debugsetparents(ui, repo, rev1, rev2=None):
2307 """manually set the parents of the current working directory
2309 """manually set the parents of the current working directory
2308
2310
2309 This is useful for writing repository conversion tools, but should
2311 This is useful for writing repository conversion tools, but should
2310 be used with care. For example, neither the working directory nor the
2312 be used with care. For example, neither the working directory nor the
2311 dirstate is updated, so file status may be incorrect after running this
2313 dirstate is updated, so file status may be incorrect after running this
2312 command.
2314 command.
2313
2315
2314 Returns 0 on success.
2316 Returns 0 on success.
2315 """
2317 """
2316
2318
2317 node1 = scmutil.revsingle(repo, rev1).node()
2319 node1 = scmutil.revsingle(repo, rev1).node()
2318 node2 = scmutil.revsingle(repo, rev2, 'null').node()
2320 node2 = scmutil.revsingle(repo, rev2, 'null').node()
2319
2321
2320 with repo.wlock():
2322 with repo.wlock():
2321 repo.setparents(node1, node2)
2323 repo.setparents(node1, node2)
2322
2324
2323 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2325 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2324 def debugssl(ui, repo, source=None, **opts):
2326 def debugssl(ui, repo, source=None, **opts):
2325 '''test a secure connection to a server
2327 '''test a secure connection to a server
2326
2328
2327 This builds the certificate chain for the server on Windows, installing the
2329 This builds the certificate chain for the server on Windows, installing the
2328 missing intermediates and trusted root via Windows Update if necessary. It
2330 missing intermediates and trusted root via Windows Update if necessary. It
2329 does nothing on other platforms.
2331 does nothing on other platforms.
2330
2332
2331 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2333 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2332 that server is used. See :hg:`help urls` for more information.
2334 that server is used. See :hg:`help urls` for more information.
2333
2335
2334 If the update succeeds, retry the original operation. Otherwise, the cause
2336 If the update succeeds, retry the original operation. Otherwise, the cause
2335 of the SSL error is likely another issue.
2337 of the SSL error is likely another issue.
2336 '''
2338 '''
2337 if not pycompat.iswindows:
2339 if not pycompat.iswindows:
2338 raise error.Abort(_('certificate chain building is only possible on '
2340 raise error.Abort(_('certificate chain building is only possible on '
2339 'Windows'))
2341 'Windows'))
2340
2342
2341 if not source:
2343 if not source:
2342 if not repo:
2344 if not repo:
2343 raise error.Abort(_("there is no Mercurial repository here, and no "
2345 raise error.Abort(_("there is no Mercurial repository here, and no "
2344 "server specified"))
2346 "server specified"))
2345 source = "default"
2347 source = "default"
2346
2348
2347 source, branches = hg.parseurl(ui.expandpath(source))
2349 source, branches = hg.parseurl(ui.expandpath(source))
2348 url = util.url(source)
2350 url = util.url(source)
2349 addr = None
2351 addr = None
2350
2352
2351 defaultport = {'https': 443, 'ssh': 22}
2353 defaultport = {'https': 443, 'ssh': 22}
2352 if url.scheme in defaultport:
2354 if url.scheme in defaultport:
2353 try:
2355 try:
2354 addr = (url.host, int(url.port or defaultport[url.scheme]))
2356 addr = (url.host, int(url.port or defaultport[url.scheme]))
2355 except ValueError:
2357 except ValueError:
2356 raise error.Abort(_("malformed port number in URL"))
2358 raise error.Abort(_("malformed port number in URL"))
2357 else:
2359 else:
2358 raise error.Abort(_("only https and ssh connections are supported"))
2360 raise error.Abort(_("only https and ssh connections are supported"))
2359
2361
2360 from . import win32
2362 from . import win32
2361
2363
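# certificate verification is disabled (CERT_NONE); the peer certificate is fetched only to inspect its chain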
2362 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2364 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2363 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2365 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2364
2366
2365 try:
2367 try:
2366 s.connect(addr)
2368 s.connect(addr)
2367 cert = s.getpeercert(True)
2369 cert = s.getpeercert(True)
2368
2370
2369 ui.status(_('checking the certificate chain for %s\n') % url.host)
2371 ui.status(_('checking the certificate chain for %s\n') % url.host)
2370
2372
2371 complete = win32.checkcertificatechain(cert, build=False)
2373 complete = win32.checkcertificatechain(cert, build=False)
2372
2374
2373 if not complete:
2375 if not complete:
2374 ui.status(_('certificate chain is incomplete, updating... '))
2376 ui.status(_('certificate chain is incomplete, updating... '))
2375
2377
2376 if not win32.checkcertificatechain(cert):
2378 if not win32.checkcertificatechain(cert):
2377 ui.status(_('failed.\n'))
2379 ui.status(_('failed.\n'))
2378 else:
2380 else:
2379 ui.status(_('done.\n'))
2381 ui.status(_('done.\n'))
2380 else:
2382 else:
2381 ui.status(_('full certificate chain is available\n'))
2383 ui.status(_('full certificate chain is available\n'))
2382 finally:
2384 finally:
2383 s.close()
2385 s.close()
2384
2386
2385 @command('debugsub',
2387 @command('debugsub',
2386 [('r', 'rev', '',
2388 [('r', 'rev', '',
2387 _('revision to check'), _('REV'))],
2389 _('revision to check'), _('REV'))],
2388 _('[-r REV] [REV]'))
2390 _('[-r REV] [REV]'))
2389 def debugsub(ui, repo, rev=None):
2391 def debugsub(ui, repo, rev=None):
2390 ctx = scmutil.revsingle(repo, rev, None)
2392 ctx = scmutil.revsingle(repo, rev, None)
2391 for k, v in sorted(ctx.substate.items()):
2393 for k, v in sorted(ctx.substate.items()):
2392 ui.write(('path %s\n') % k)
2394 ui.write(('path %s\n') % k)
2393 ui.write((' source %s\n') % v[0])
2395 ui.write((' source %s\n') % v[0])
2394 ui.write((' revision %s\n') % v[1])
2396 ui.write((' revision %s\n') % v[1])
2395
2397
2396 @command('debugsuccessorssets',
2398 @command('debugsuccessorssets',
2397 [('', 'closest', False, _('return closest successors sets only'))],
2399 [('', 'closest', False, _('return closest successors sets only'))],
2398 _('[REV]'))
2400 _('[REV]'))
2399 def debugsuccessorssets(ui, repo, *revs, **opts):
2401 def debugsuccessorssets(ui, repo, *revs, **opts):
2400 """show set of successors for revision
2402 """show set of successors for revision
2401
2403
2402 A successors set of changeset A is a consistent group of revisions that
2404 A successors set of changeset A is a consistent group of revisions that
2404 succeed A. It contains non-obsolete changesets only unless the
2406 succeed A. It contains non-obsolete changesets only unless the
2405 closest successors set is requested (--closest).
2407 closest successors set is requested (--closest).
2405
2407
2406 In most cases a changeset A has a single successors set containing a single
2408 In most cases a changeset A has a single successors set containing a single
2407 successor (changeset A replaced by A').
2409 successor (changeset A replaced by A').
2408
2410
2411 A changeset that is made obsolete with no successors is called "pruned".
2413 A changeset that is made obsolete with no successors is called "pruned".
2410 Such changesets have no successors sets at all.
2412 Such changesets have no successors sets at all.
2411
2413
2412 A changeset that has been "split" will have a successors set containing
2414 A changeset that has been "split" will have a successors set containing
2413 more than one successor.
2415 more than one successor.
2414
2416
2415 A changeset that has been rewritten in multiple different ways is called
2417 A changeset that has been rewritten in multiple different ways is called
2416 "divergent". Such changesets have multiple successor sets (each of which
2418 "divergent". Such changesets have multiple successor sets (each of which
2417 may also be split, i.e. have multiple successors).
2419 may also be split, i.e. have multiple successors).
2418
2420
2419 Results are displayed as follows::
2421 Results are displayed as follows::
2420
2422
2421 <rev1>
2423 <rev1>
2422 <successors-1A>
2424 <successors-1A>
2423 <rev2>
2425 <rev2>
2424 <successors-2A>
2426 <successors-2A>
2425 <successors-2B1> <successors-2B2> <successors-2B3>
2427 <successors-2B1> <successors-2B2> <successors-2B3>
2426
2428
2427 Here rev2 has two possible (i.e. divergent) successors sets. The first
2429 Here rev2 has two possible (i.e. divergent) successors sets. The first
2428 holds one element, whereas the second holds three (i.e. the changeset has
2430 holds one element, whereas the second holds three (i.e. the changeset has
2429 been split).
2431 been split).
2430 """
2432 """
2431 # passed to successorssets caching computation from one call to another
2433 # passed to successorssets caching computation from one call to another
2432 cache = {}
2434 cache = {}
2433 ctx2str = bytes
2435 ctx2str = bytes
2434 node2str = short
2436 node2str = short
2435 for rev in scmutil.revrange(repo, revs):
2437 for rev in scmutil.revrange(repo, revs):
2436 ctx = repo[rev]
2438 ctx = repo[rev]
2437 ui.write('%s\n'% ctx2str(ctx))
2439 ui.write('%s\n'% ctx2str(ctx))
2438 for succsset in obsutil.successorssets(repo, ctx.node(),
2440 for succsset in obsutil.successorssets(repo, ctx.node(),
2439 closest=opts[r'closest'],
2441 closest=opts[r'closest'],
2440 cache=cache):
2442 cache=cache):
2441 if succsset:
2443 if succsset:
2442 ui.write(' ')
2444 ui.write(' ')
2443 ui.write(node2str(succsset[0]))
2445 ui.write(node2str(succsset[0]))
2444 for node in succsset[1:]:
2446 for node in succsset[1:]:
2445 ui.write(' ')
2447 ui.write(' ')
2446 ui.write(node2str(node))
2448 ui.write(node2str(node))
2447 ui.write('\n')
2449 ui.write('\n')
2448
2450
2449 @command('debugtemplate',
2451 @command('debugtemplate',
2450 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2452 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2451 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2453 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2452 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2454 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2453 optionalrepo=True)
2455 optionalrepo=True)
2454 def debugtemplate(ui, repo, tmpl, **opts):
2456 def debugtemplate(ui, repo, tmpl, **opts):
2455 """parse and apply a template
2457 """parse and apply a template
2456
2458
2457 If -r/--rev is given, the template is processed as a log template and
2459 If -r/--rev is given, the template is processed as a log template and
2458 applied to the given changesets. Otherwise, it is processed as a generic
2460 applied to the given changesets. Otherwise, it is processed as a generic
2459 template.
2461 template.
2460
2462
2461 Use --verbose to print the parsed tree.
2463 Use --verbose to print the parsed tree.
2462 """
2464 """
2463 revs = None
2465 revs = None
2464 if opts[r'rev']:
2466 if opts[r'rev']:
2465 if repo is None:
2467 if repo is None:
2466 raise error.RepoError(_('there is no Mercurial repository here '
2468 raise error.RepoError(_('there is no Mercurial repository here '
2467 '(.hg not found)'))
2469 '(.hg not found)'))
2468 revs = scmutil.revrange(repo, opts[r'rev'])
2470 revs = scmutil.revrange(repo, opts[r'rev'])
2469
2471
2470 props = {}
2472 props = {}
2471 for d in opts[r'define']:
2473 for d in opts[r'define']:
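# split each -D KEY=VALUE definition; an empty key or the name 'ui' is rejected as malformed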
2472 try:
2474 try:
2473 k, v = (e.strip() for e in d.split('=', 1))
2475 k, v = (e.strip() for e in d.split('=', 1))
2474 if not k or k == 'ui':
2476 if not k or k == 'ui':
2475 raise ValueError
2477 raise ValueError
2476 props[k] = v
2478 props[k] = v
2477 except ValueError:
2479 except ValueError:
2478 raise error.Abort(_('malformed keyword definition: %s') % d)
2480 raise error.Abort(_('malformed keyword definition: %s') % d)
2479
2481
2480 if ui.verbose:
2482 if ui.verbose:
2481 aliases = ui.configitems('templatealias')
2483 aliases = ui.configitems('templatealias')
2482 tree = templater.parse(tmpl)
2484 tree = templater.parse(tmpl)
2483 ui.note(templater.prettyformat(tree), '\n')
2485 ui.note(templater.prettyformat(tree), '\n')
2484 newtree = templater.expandaliases(tree, aliases)
2486 newtree = templater.expandaliases(tree, aliases)
2485 if newtree != tree:
2487 if newtree != tree:
2486 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2488 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2487
2489
2488 if revs is None:
2490 if revs is None:
2489 tres = formatter.templateresources(ui, repo)
2491 tres = formatter.templateresources(ui, repo)
2490 t = formatter.maketemplater(ui, tmpl, resources=tres)
2492 t = formatter.maketemplater(ui, tmpl, resources=tres)
2491 if ui.verbose:
2493 if ui.verbose:
2492 kwds, funcs = t.symbolsuseddefault()
2494 kwds, funcs = t.symbolsuseddefault()
2493 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2495 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2494 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2496 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2495 ui.write(t.renderdefault(props))
2497 ui.write(t.renderdefault(props))
2496 else:
2498 else:
2497 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2499 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2498 if ui.verbose:
2500 if ui.verbose:
2499 kwds, funcs = displayer.t.symbolsuseddefault()
2501 kwds, funcs = displayer.t.symbolsuseddefault()
2500 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2502 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2501 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2503 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2502 for r in revs:
2504 for r in revs:
2503 displayer.show(repo[r], **pycompat.strkwargs(props))
2505 displayer.show(repo[r], **pycompat.strkwargs(props))
2504 displayer.close()
2506 displayer.close()
2505
2507
2506 @command('debuguigetpass', [
2508 @command('debuguigetpass', [
2507 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2509 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2508 ], _('[-p TEXT]'), norepo=True)
2510 ], _('[-p TEXT]'), norepo=True)
2509 def debuguigetpass(ui, prompt=''):
2511 def debuguigetpass(ui, prompt=''):
2510 """show prompt to type password"""
2512 """show prompt to type password"""
2511 r = ui.getpass(prompt)
2513 r = ui.getpass(prompt)
2512 ui.write(('response: %s\n') % r)
2514 ui.write(('response: %s\n') % r)
2513
2515
2514 @command('debuguiprompt', [
2516 @command('debuguiprompt', [
2515 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2517 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2516 ], _('[-p TEXT]'), norepo=True)
2518 ], _('[-p TEXT]'), norepo=True)
2517 def debuguiprompt(ui, prompt=''):
2519 def debuguiprompt(ui, prompt=''):
2518 """show plain prompt"""
2520 """show plain prompt"""
2519 r = ui.prompt(prompt)
2521 r = ui.prompt(prompt)
2520 ui.write(('response: %s\n') % r)
2522 ui.write(('response: %s\n') % r)
2521
2523
2522 @command('debugupdatecaches', [])
2524 @command('debugupdatecaches', [])
2523 def debugupdatecaches(ui, repo, *pats, **opts):
2525 def debugupdatecaches(ui, repo, *pats, **opts):
2524 """warm all known caches in the repository"""
2526 """warm all known caches in the repository"""
2525 with repo.wlock(), repo.lock():
2527 with repo.wlock(), repo.lock():
2526 repo.updatecaches(full=True)
2528 repo.updatecaches(full=True)
2527
2529
@command('debugupgraderepo', [
    ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
    ('', 'run', False, _('performs an upgrade')),
])
def debugupgraderepo(ui, repo, run=False, optimize=None):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.
    """
    return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)

@command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
         inferrepo=True)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
    items = list(repo[None].walk(m))
    if not items:
        return
    f = lambda fn: fn
    if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
        f = lambda fn: util.normpath(fn)
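    # Build a width-parameterized format string: the doubled %% survives this
    # first substitution as a literal %, so only the column widths are baked
    # in here; the per-file values are filled in by the second % inside the
    # loop below.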
    fmt = 'f %%-%ds %%-%ds %%s' % (
        max([len(abs) for abs in items]),
        max([len(m.rel(abs)) for abs in items]))
    for abs in items:
        line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
        ui.write("%s\n" % line.rstrip())

@command('debugwhyunstable', [], _('REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
        dnodes = ''
        if entry.get('divergentnodes'):
            dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
                              for ctx in entry['divergentnodes']) + ' '
        ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
                                    entry['reason'], entry['node']))

@command('debugwireargs',
    [('', 'three', '', 'three'),
    ('', 'four', '', 'four'),
    ('', 'five', '', 'five'),
    ] + cmdutil.remoteopts,
    _('REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    for opt in cmdutil.remoteopts:
        del opts[opt[1]]
    args = {}
    for k, v in opts.iteritems():
        if v:
            args[k] = v
    args = pycompat.strkwargs(args)
    # run twice to check that we don't mess up the stream for the next command
    res1 = repo.debugwireargs(*vals, **args)
    res2 = repo.debugwireargs(*vals, **args)
    ui.write("%s\n" % res1)
    if res1 != res2:
        ui.warn("%s\n" % res2)

def _parsewirelangblocks(fh):
    activeaction = None
    blocklines = []

    for line in fh:
        line = line.rstrip()
        if not line:
            continue

        if line.startswith(b'#'):
            continue

        if not line.startswith(' '):
            # New block. Flush previous one.
            if activeaction:
                yield activeaction, blocklines

            activeaction = line
            blocklines = []
            continue

        # Else we start with an indent.

        if not activeaction:
            raise error.Abort(_('indented line outside of block'))

        blocklines.append(line)

    # Flush last block.
    if activeaction:
        yield activeaction, blocklines

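# Illustrative sketch (not part of the original file): given stdin containing
#
#   command listkeys
#       namespace bookmarks
#   readavailable
#
# the generator above yields ('command listkeys', ['    namespace bookmarks'])
# and then ('readavailable', []). Indented argument lines keep their leading
# whitespace; consumers lstrip() them as needed.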
@command('debugwireproto',
    [
        ('', 'localssh', False, _('start an SSH server for this repo')),
        ('', 'peer', '', _('construct a specific version of the peer')),
        ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
        ('', 'nologhandshake', False,
         _('do not log I/O related to the peer handshake')),
    ] + cmdutil.remoteopts,
    _('[PATH]'),
    optionalrepo=True)
def debugwireproto(ui, repo, path=None, **opts):
2652 """send wire protocol commands to a server
2654 """send wire protocol commands to a server
2653
2655
2654 This command can be used to issue wire protocol commands to remote
2656 This command can be used to issue wire protocol commands to remote
2655 peers and to debug the raw data being exchanged.
2657 peers and to debug the raw data being exchanged.
2656
2658
2657 ``--localssh`` will start an SSH server against the current repository
2659 ``--localssh`` will start an SSH server against the current repository
2658 and connect to that. By default, the connection will perform a handshake
2660 and connect to that. By default, the connection will perform a handshake
2659 and establish an appropriate peer instance.
2661 and establish an appropriate peer instance.
2660
2662
2661 ``--peer`` can be used to bypass the handshake protocol and construct a
2663 ``--peer`` can be used to bypass the handshake protocol and construct a
2662 peer instance using the specified class type. Valid values are ``raw``,
2664 peer instance using the specified class type. Valid values are ``raw``,
2663 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2665 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2664 raw data payloads and don't support higher-level command actions.
2666 raw data payloads and don't support higher-level command actions.
2665
2667
2666 ``--noreadstderr`` can be used to disable automatic reading from stderr
2668 ``--noreadstderr`` can be used to disable automatic reading from stderr
2667 of the peer (for SSH connections only). Disabling automatic reading of
2669 of the peer (for SSH connections only). Disabling automatic reading of
2668 stderr is useful for making output more deterministic.
2670 stderr is useful for making output more deterministic.
2669
2671
2670 Commands are issued via a mini language which is specified via stdin.
2672 Commands are issued via a mini language which is specified via stdin.
2671 The language consists of individual actions to perform. An action is
2673 The language consists of individual actions to perform. An action is
2672 defined by a block. A block is defined as a line with no leading
2674 defined by a block. A block is defined as a line with no leading
2673 space followed by 0 or more lines with leading space. Blocks are
2675 space followed by 0 or more lines with leading space. Blocks are
2674 effectively a high-level command with additional metadata.
2676 effectively a high-level command with additional metadata.
2675
2677
2676 Lines beginning with ``#`` are ignored.
2678 Lines beginning with ``#`` are ignored.
2677
2679
2678 The following sections denote available actions.
2680 The following sections denote available actions.
2679
2681
2680 raw
2682 raw
2681 ---
2683 ---
2682
2684
2683 Send raw data to the server.
2685 Send raw data to the server.
2684
2686
2685 The block payload contains the raw data to send as one atomic send
2687 The block payload contains the raw data to send as one atomic send
2686 operation. The data may not actually be delivered in a single system
2688 operation. The data may not actually be delivered in a single system
2687 call: it depends on the abilities of the transport being used.
2689 call: it depends on the abilities of the transport being used.
2688
2690
2689 Each line in the block is de-indented and concatenated. Then, that
2691 Each line in the block is de-indented and concatenated. Then, that
2690 value is evaluated as a Python b'' literal. This allows the use of
2692 value is evaluated as a Python b'' literal. This allows the use of
2691 backslash escaping, etc.
2693 backslash escaping, etc.
2692
2694
2693 raw+
2695 raw+
2694 ----
2696 ----
2695
2697
2696 Behaves like ``raw`` except flushes output afterwards.
2698 Behaves like ``raw`` except flushes output afterwards.
2697
2699
2698 command <X>
2700 command <X>
2699 -----------
2701 -----------
2700
2702
2701 Send a request to run a named command, whose name follows the ``command``
2703 Send a request to run a named command, whose name follows the ``command``
2702 string.
2704 string.
2703
2705
2704 Arguments to the command are defined as lines in this block. The format of
2706 Arguments to the command are defined as lines in this block. The format of
2705 each line is ``<key> <value>``. e.g.::
2707 each line is ``<key> <value>``. e.g.::
2706
2708
2707 command listkeys
2709 command listkeys
2708 namespace bookmarks
2710 namespace bookmarks
2709
2711
2710 If the value begins with ``eval:``, it will be interpreted as a Python
2712 If the value begins with ``eval:``, it will be interpreted as a Python
2711 literal expression. Otherwise values are interpreted as Python b'' literals.
2713 literal expression. Otherwise values are interpreted as Python b'' literals.
2712 This allows sending complex types and encoding special byte sequences via
2714 This allows sending complex types and encoding special byte sequences via
2713 backslash escaping.
2715 backslash escaping.
2714
2716
2715 The following arguments have special meaning:
2717 The following arguments have special meaning:
2716
2718
2717 ``PUSHFILE``
2719 ``PUSHFILE``
2718 When defined, the *push* mechanism of the peer will be used instead
2720 When defined, the *push* mechanism of the peer will be used instead
2719 of the static request-response mechanism and the content of the
2721 of the static request-response mechanism and the content of the
2720 file specified in the value of this argument will be sent as the
2722 file specified in the value of this argument will be sent as the
2721 command payload.
2723 command payload.
2722
2724
2723 This can be used to submit a local bundle file to the remote.
2725 This can be used to submit a local bundle file to the remote.
2724
2726
    batchbegin
    ----------

    Instruct the peer to begin a batched send.

    All ``command`` blocks are queued for execution until the next
    ``batchsubmit`` block.

    batchsubmit
    -----------

    Submit previously queued ``command`` blocks as a batch request.

    This action MUST be paired with a ``batchbegin`` action.

    httprequest <method> <path>
    ---------------------------

    (HTTP peer only)

    Send an HTTP request to the peer.

    The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.

    Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
    headers to add to the request. e.g. ``Accept: foo``.

    The following arguments are special:

    ``BODYFILE``
        The content of the file defined as the value to this argument will be
        transferred verbatim as the HTTP request body.

    ``frame <type> <flags> <payload>``
        Send a unified protocol frame as part of the request body.

        All frames will be collected and sent as the body to the HTTP
        request.

    close
    -----

    Close the connection to the server.

    flush
    -----

    Flush data written to the server.

    readavailable
    -------------

    Close the write end of the connection and read all available data from
    the server.

    If the connection to the server encompasses multiple pipes, we poll both
    pipes and read available data.

    readline
    --------

    Read a line of output from the server. If there are multiple output
    pipes, reads only the main pipe.

    ereadline
    ---------

    Like ``readline``, but read from the stderr pipe, if available.

    read <X>
    --------

    ``read()`` N bytes from the server's main output pipe.

    eread <X>
    ---------

    ``read()`` N bytes from the server's stderr pipe, if available.

    Specifying Unified Frame-Based Protocol Frames
    ----------------------------------------------

    It is possible to emit *Unified Frame-Based Protocol* frames by using
    special syntax.

    A frame is composed of a type, flags, and a payload. These can be parsed
    from a string of the form:

        <request-id> <stream-id> <stream-flags> <type> <flags> <payload>

    ``request-id`` and ``stream-id`` are integers defining the request and
    stream identifiers.

    ``type`` can be an integer value for the frame type or the string name
    of the type. The strings are defined in ``wireprotoframing.py``. e.g.
    ``command-name``.

    ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
    components. Each component (and there can be just one) can be an integer
    or a flag name for stream flags or frame flags, respectively. Values are
    resolved to integers and then bitwise OR'd together.

    ``payload`` represents the raw frame payload. If it begins with
    ``cbor:``, the following string is evaluated as Python code and the
    resulting object is fed into a CBOR encoder. Otherwise it is interpreted
    as a Python byte string literal.
    """
    opts = pycompat.byteskwargs(opts)

    if opts['localssh'] and not repo:
        raise error.Abort(_('--localssh requires a repository'))

    if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
        raise error.Abort(_('invalid value for --peer'),
                          hint=_('valid values are "raw", "http2", "ssh1", '
                                 'and "ssh2"'))

    if path and opts['localssh']:
        raise error.Abort(_('cannot specify --localssh with an explicit '
                            'path'))

    if ui.interactive():
        ui.write(_('(waiting for commands on stdin)\n'))

    blocks = list(_parsewirelangblocks(ui.fin))

    proc = None
    stdin = None
    stdout = None
    stderr = None
    opener = None

    if opts['localssh']:
        # We start the SSH server in its own process so there is process
        # separation. This prevents a whole class of potential bugs around
        # shared state from interfering with server operation.
        args = procutil.hgcmd() + [
            '-R', repo.root,
            'debugserve', '--sshstdio',
        ]
        proc = subprocess.Popen(args, stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                                bufsize=0)

        stdin = proc.stdin
        stdout = proc.stdout
        stderr = proc.stderr

        # We turn the pipes into observers so we can log I/O.
        if ui.verbose or opts['peer'] == 'raw':
            stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
                                               logdata=True)
            stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
                                                logdata=True)
            stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
                                                logdata=True)

        # --localssh also implies the peer connection settings.

        url = 'ssh://localserver'
        autoreadstderr = not opts['noreadstderr']

        if opts['peer'] == 'ssh1':
            ui.write(_('creating ssh peer for wire protocol version 1\n'))
            peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
                                     None, autoreadstderr=autoreadstderr)
        elif opts['peer'] == 'ssh2':
            ui.write(_('creating ssh peer for wire protocol version 2\n'))
            peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
                                     None, autoreadstderr=autoreadstderr)
        elif opts['peer'] == 'raw':
            ui.write(_('using raw connection to peer\n'))
            peer = None
        else:
            ui.write(_('creating ssh peer from handshake results\n'))
            peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
                                    autoreadstderr=autoreadstderr)

    elif path:
        # We bypass hg.peer() so we can proxy the sockets.
        # TODO consider not doing this because we skip
        # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
        u = util.url(path)
        if u.scheme != 'http':
            raise error.Abort(_('only http:// paths are currently supported'))

        url, authinfo = u.authinfo()
        openerargs = {
            r'useragent': b'Mercurial debugwireproto',
        }

        # Turn pipes/sockets into observers so we can log I/O.
        if ui.verbose:
            openerargs.update({
                r'loggingfh': ui,
                r'loggingname': b's',
                r'loggingopts': {
                    r'logdata': True,
                    r'logdataapis': False,
                },
            })

        if ui.debugflag:
            openerargs[r'loggingopts'][r'logdataapis'] = True

        # Don't send default headers when in raw mode. This allows us to
        # bypass most of the behavior of our URL handling code so we can
        # have near complete control over what's sent on the wire.
        if opts['peer'] == 'raw':
            openerargs[r'sendaccept'] = False

        opener = urlmod.opener(ui, authinfo, **openerargs)

        if opts['peer'] == 'http2':
            ui.write(_('creating http peer for wire protocol version 2\n'))
            # We go through makepeer() because we need an API descriptor for
            # the peer instance to be useful.
            with ui.configoverride({
                ('experimental', 'httppeer.advertise-v2'): True}):
                if opts['nologhandshake']:
                    ui.pushbuffer()

                peer = httppeer.makepeer(ui, path, opener=opener)

                if opts['nologhandshake']:
                    ui.popbuffer()

            if not isinstance(peer, httppeer.httpv2peer):
                raise error.Abort(_('could not instantiate HTTP peer for '
                                    'wire protocol version 2'),
                                  hint=_('the server may not have the feature '
                                         'enabled or is not allowing this '
                                         'client version'))

        elif opts['peer'] == 'raw':
            ui.write(_('using raw connection to peer\n'))
            peer = None
        elif opts['peer']:
            raise error.Abort(_('--peer %s not supported with HTTP peers') %
                              opts['peer'])
        else:
            peer = httppeer.makepeer(ui, path, opener=opener)

        # We /could/ populate stdin/stdout with sock.makefile()...
    else:
        raise error.Abort(_('unsupported connection configuration'))

    batchedcommands = None

    # Now perform actions based on the parsed wire language instructions.
    for action, lines in blocks:
        if action in ('raw', 'raw+'):
            if not stdin:
                raise error.Abort(_('cannot call raw/raw+ on this peer'))

            # Concatenate the data together.
            data = ''.join(l.lstrip() for l in lines)
            data = stringutil.unescapestr(data)
            stdin.write(data)

            if action == 'raw+':
                stdin.flush()
        elif action == 'flush':
            if not stdin:
                raise error.Abort(_('cannot call flush on this peer'))
            stdin.flush()
        elif action.startswith('command'):
            if not peer:
                raise error.Abort(_('cannot send commands unless peer instance '
                                    'is available'))

            command = action.split(' ', 1)[1]

            args = {}
            for line in lines:
                # We need to allow empty values.
                fields = line.lstrip().split(' ', 1)
                if len(fields) == 1:
                    key = fields[0]
                    value = ''
                else:
                    key, value = fields

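                # Per the docstring above: 'eval:' values become arbitrary
                # Python literals (ints, lists, dicts, ...); everything else
                # is treated as a b'' literal with backslash escapes honored.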
                if value.startswith('eval:'):
                    value = stringutil.evalpythonliteral(value[5:])
                else:
                    value = stringutil.unescapestr(value)

                args[key] = value

            if batchedcommands is not None:
                batchedcommands.append((command, args))
                continue

            ui.status(_('sending %s command\n') % command)

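            # Per the docstring above, PUSHFILE switches to the peer's *push*
            # mechanism: the named file (e.g. a bundle) is streamed as the
            # command payload instead of being passed as a regular argument.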
            if 'PUSHFILE' in args:
                with open(args['PUSHFILE'], r'rb') as fh:
                    del args['PUSHFILE']
                    res, output = peer._callpush(command, fh,
                                                 **pycompat.strkwargs(args))
                    ui.status(_('result: %s\n') % stringutil.escapestr(res))
                    ui.status(_('remote output: %s\n') %
                              stringutil.escapestr(output))
            else:
                with peer.commandexecutor() as e:
                    res = e.callcommand(command, args).result()

                if isinstance(res, wireprotov2peer.commandresponse):
                    val = list(res.cborobjects())
                    ui.status(_('response: %s\n') %
                              stringutil.pprint(val, bprefix=True))

                else:
                    ui.status(_('response: %s\n') %
                              stringutil.pprint(res, bprefix=True))

        elif action == 'batchbegin':
            if batchedcommands is not None:
                raise error.Abort(_('nested batchbegin not allowed'))

            batchedcommands = []
        elif action == 'batchsubmit':
            # There is a batching API we could go through. But it would be
            # difficult to normalize requests into function calls. It is easier
            # to bypass this layer and normalize to commands + args.
            ui.status(_('sending batch with %d sub-commands\n') %
                      len(batchedcommands))
            for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
                ui.status(_('response #%d: %s\n') %
                          (i, stringutil.escapestr(chunk)))

            batchedcommands = None

        elif action.startswith('httprequest '):
            if not opener:
                raise error.Abort(_('cannot use httprequest without an HTTP '
                                    'peer'))

            request = action.split(' ', 2)
            if len(request) != 3:
                raise error.Abort(_('invalid httprequest: expected format is '
                                    '"httprequest <method> <path>"'))

            method, httppath = request[1:]
            headers = {}
            body = None
            frames = []
            for line in lines:
                line = line.lstrip()
                m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
                if m:
                    headers[m.group(1)] = m.group(2)
                    continue

                if line.startswith(b'BODYFILE '):
                    with open(line.split(b' ', 1)[1], 'rb') as fh:
                        body = fh.read()
                elif line.startswith(b'frame '):
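                    # The human-readable frame string has the form documented
                    # in the docstring above:
                    #   <request-id> <stream-id> <stream-flags> <type> <flags>
                    #   <payload>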
                    frame = wireprotoframing.makeframefromhumanstring(
                        line[len(b'frame '):])

                    frames.append(frame)
                else:
                    raise error.Abort(_('unknown argument to httprequest: %s') %
                                      line)

            url = path + httppath

            if frames:
                body = b''.join(bytes(f) for f in frames)

            req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)

            # urllib.Request insists on using has_data() as a proxy for
            # determining the request method. Override that to use our
            # explicitly requested method.
            req.get_method = lambda: method

            try:
                res = opener.open(req)
                body = res.read()
            except util.urlerr.urlerror as e:
                e.read()
                continue

            if res.headers.get('Content-Type') == 'application/mercurial-cbor':
                ui.write(_('cbor> %s\n') %
                         stringutil.pprint(cbor.loads(body), bprefix=True))

        elif action == 'close':
            peer.close()
        elif action == 'readavailable':
            if not stdout or not stderr:
                raise error.Abort(_('readavailable not available on this peer'))

            stdin.close()
            stdout.read()
            stderr.read()

        elif action == 'readline':
            if not stdout:
                raise error.Abort(_('readline not available on this peer'))
            stdout.readline()
        elif action == 'ereadline':
            if not stderr:
                raise error.Abort(_('ereadline not available on this peer'))
            stderr.readline()
        elif action.startswith('read '):
            count = int(action.split(' ', 1)[1])
            if not stdout:
                raise error.Abort(_('read not available on this peer'))
            stdout.read(count)
        elif action.startswith('eread '):
            count = int(action.split(' ', 1)[1])
            if not stderr:
                raise error.Abort(_('eread not available on this peer'))
            stderr.read(count)
        else:
            raise error.Abort(_('unknown action: %s') % action)

    if batchedcommands is not None:
        raise error.Abort(_('unclosed "batchbegin" request'))

    if peer:
        peer.close()

    if proc:
        proc.kill()