##// END OF EJS Templates
debugcommands: fix typo in debuguigetpass
Yuya Nishihara -
r45148:649fd6c3 default
parent child Browse files
Show More
@@ -1,4512 +1,4512
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import difflib
12 import difflib
13 import errno
13 import errno
14 import glob
14 import glob
15 import operator
15 import operator
16 import os
16 import os
17 import platform
17 import platform
18 import random
18 import random
19 import re
19 import re
20 import socket
20 import socket
21 import ssl
21 import ssl
22 import stat
22 import stat
23 import string
23 import string
24 import subprocess
24 import subprocess
25 import sys
25 import sys
26 import time
26 import time
27
27
28 from .i18n import _
28 from .i18n import _
29 from .node import (
29 from .node import (
30 bin,
30 bin,
31 hex,
31 hex,
32 nullid,
32 nullid,
33 nullrev,
33 nullrev,
34 short,
34 short,
35 )
35 )
36 from .pycompat import (
36 from .pycompat import (
37 getattr,
37 getattr,
38 open,
38 open,
39 )
39 )
40 from . import (
40 from . import (
41 bundle2,
41 bundle2,
42 bundlerepo,
42 bundlerepo,
43 changegroup,
43 changegroup,
44 cmdutil,
44 cmdutil,
45 color,
45 color,
46 context,
46 context,
47 copies,
47 copies,
48 dagparser,
48 dagparser,
49 encoding,
49 encoding,
50 error,
50 error,
51 exchange,
51 exchange,
52 extensions,
52 extensions,
53 filemerge,
53 filemerge,
54 filesetlang,
54 filesetlang,
55 formatter,
55 formatter,
56 hg,
56 hg,
57 httppeer,
57 httppeer,
58 localrepo,
58 localrepo,
59 lock as lockmod,
59 lock as lockmod,
60 logcmdutil,
60 logcmdutil,
61 merge as mergemod,
61 merge as mergemod,
62 obsolete,
62 obsolete,
63 obsutil,
63 obsutil,
64 pathutil,
64 pathutil,
65 phases,
65 phases,
66 policy,
66 policy,
67 pvec,
67 pvec,
68 pycompat,
68 pycompat,
69 registrar,
69 registrar,
70 repair,
70 repair,
71 revlog,
71 revlog,
72 revset,
72 revset,
73 revsetlang,
73 revsetlang,
74 scmutil,
74 scmutil,
75 setdiscovery,
75 setdiscovery,
76 simplemerge,
76 simplemerge,
77 sshpeer,
77 sshpeer,
78 sslutil,
78 sslutil,
79 streamclone,
79 streamclone,
80 tags as tagsmod,
80 tags as tagsmod,
81 templater,
81 templater,
82 treediscovery,
82 treediscovery,
83 upgrade,
83 upgrade,
84 url as urlmod,
84 url as urlmod,
85 util,
85 util,
86 vfs as vfsmod,
86 vfs as vfsmod,
87 wireprotoframing,
87 wireprotoframing,
88 wireprotoserver,
88 wireprotoserver,
89 wireprotov2peer,
89 wireprotov2peer,
90 )
90 )
91 from .utils import (
91 from .utils import (
92 cborutil,
92 cborutil,
93 compression,
93 compression,
94 dateutil,
94 dateutil,
95 procutil,
95 procutil,
96 stringutil,
96 stringutil,
97 )
97 )
98
98
99 from .revlogutils import (
99 from .revlogutils import (
100 deltas as deltautil,
100 deltas as deltautil,
101 nodemap,
101 nodemap,
102 )
102 )
103
103
104 release = lockmod.release
104 release = lockmod.release
105
105
106 command = registrar.command()
106 command = registrar.command()
107
107
108
108
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    # Two call forms: INDEX REV1 REV2 opens a revlog index file directly
    # from disk, while REV1 REV2 uses the current repository's changelog.
    if len(args) == 3:
        index, rev1, rev2 = args
        # audit=False: the index file may live outside any repository,
        # so path auditing against a repo root does not apply
        r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = r.lookup
    elif len(args) == 2:
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    # resolve both revisions to nodes, then print the common ancestor
    # as "rev:hexnode"
    a = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
128
128
129
129
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # openpath handles local paths and URLs alike; readbundle sniffs the
    # header to pick the right unbundler for the stream clone format
    f = hg.openpath(ui, fname)
    gen = exchange.readbundle(ui, f, fname)
    gen.apply(repo)
136
136
137
137
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
          otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    # the command only makes sense on a repo with no revisions yet
    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG (first parse pass, counting only)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1  # rev number of the most recently committed node
        atbranch = b'default'  # branch applied to subsequent nodes
        nodeids = []  # maps DAG id -> committed node, for backrefs
        id = 0
        progress.update(id)
        # second parse pass: actually create commits/tags/branches
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # merge node: three-way merge the file contents of
                        # both parents against their common ancestor
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # tag this rev's own line so every rev changes the file
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        # carry over the per-rev files from the second
                        # parent so merges do not drop them
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                # translate DAG parent refs into committed node ids
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # local tag element: record it; written out in one go below
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
313
313
314
314
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """Print the contents of changegroup ``gen``.

    With ``all`` set, every delta of every chunk (changelog, manifest,
    filelogs) is shown; otherwise only changelog node hashes are listed.
    ``indent`` prefixes each output line, for nesting under bundle2 parts.
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # dump every delta in the current chunk group under a heading
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        # iterate filelog headers until the empty sentinel dict
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
354
354
355
355
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # unknown encoding version: report it instead of aborting, so the
        # rest of the bundle can still be inspected
        msg = b"%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = b"%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        fm = ui.formatter(b'debugobsolete', opts)
        # sorted for deterministic output across runs
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()
378
378
379
379
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads contained in 'data'

    ``data`` is a binary phase-heads blob (as found in a bundle2
    'phase-heads' part); each head is printed as "hexnode phasename".
    """
    indent_string = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(indent_string)
            ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
388
388
389
389
def _quasirepr(thing):
    """Return a stable, human-readable bytes rendering of ``thing``.

    Mapping types are rendered with their keys in sorted order so that
    the output is deterministic; everything else falls back to ``repr``.
    """
    if not isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        return pycompat.bytestr(repr(thing))
    rendered = [b'%s: %s' % (key, thing[key]) for key in sorted(thing)]
    return b'{%s}' % b', '.join(rendered)
396
396
397
397
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    # optional filter: only show parts whose type was requested
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        # known part types get a detailed, indented breakdown unless quiet
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
420
420
421
421
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # --spec only prints the bundlespec string; no content listing
            spec = exchange.getbundlespec(ui, f)
            ui.write(b'%s\n' % spec)
            return

        # dispatch on the detected bundle format
        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
444
444
445
445
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    caps = peer.capabilities()
    ui.writenoi18n(b'Main capabilities:\n')
    for c in sorted(caps):
        ui.write(b'  %s\n' % c)
    # bundle2 capabilities are nested: one key, possibly several values
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.writenoi18n(b'Bundle2 capabilities:\n')
        for key, values in sorted(pycompat.iteritems(b2caps)):
            ui.write(b'  %s\n' % key)
            for v in values:
                ui.write(b'    %s\n' % v)
462
462
463
463
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    p1, p2 = repo.dirstate.parents()
    man1 = repo[p1].manifest()
    man2 = repo[p2].manifest()
    problems = [0]

    def complain(msg):
        # emit one inconsistency warning and remember that we saw it
        ui.warn(msg)
        problems[0] += 1

    # pass 1: every tracked file must be consistent with the manifests
    for fname in repo.dirstate:
        entry = repo.dirstate[fname]
        if entry in b"nr" and fname not in man1:
            complain(
                _(b"%s in state %s, but not in manifest1\n") % (fname, entry)
            )
        if entry in b"a" and fname in man1:
            complain(
                _(b"%s in state %s, but also in manifest1\n") % (fname, entry)
            )
        if entry in b"m" and fname not in man1 and fname not in man2:
            complain(
                _(b"%s in state %s, but not in either manifest\n")
                % (fname, entry)
            )
    # pass 2: every file in the first manifest must be tracked
    for fname in man1:
        entry = repo.dirstate[fname]
        if entry not in b"nrm":
            complain(
                _(b"%s in manifest1, but listed as state %s") % (fname, entry)
            )
    if problems[0]:
        raise error.Abort(
            _(b".hg/dirstate inconsistent with current parent's manifest")
        )
492
492
493
493
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    # always report the active color mode first, then dispatch to the
    # requested listing (styles with --style, raw colors otherwise)
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
506
506
507
507
def _debugdisplaycolor(ui):
    """Print every available color/effect name, each rendered in itself."""
    # work on a copy so the caller's ui styles are left untouched
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        # terminfo mode: color.* and terminfo.* config keys define extra
        # effects; strip the prefix to get the label
        for k, v in ui.configitems(b'color'):
            if k.startswith(b'color.'):
                ui._styles[k] = k[6:]
            elif k.startswith(b'terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_(b'available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(b'%s\n' % colorname, label=label)
524
524
525
525
def _debugdisplaystyle(ui):
    """List every configured style label together with its effects."""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # pad so the effect columns of all labels line up
    width = max(len(name) for name in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            ui.write(b': ')
            padding = max(0, width - len(label))
            ui.write(b' ' * padding)
            rendered = (ui.label(e, e) for e in effects.split())
            ui.write(b', '.join(rendered))
        ui.write(b'\n')
539
539
540
540
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    # stream bundles cannot filter by phase, so secret changesets would
    # leak into the bundle; warn rather than silently including them
    if phases.hassecret(repo):
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
562
562
563
563
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    # opts comes straight from **kwargs here, so its keys are native str.
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # Standalone revlog index given on the command line: read it directly
        # from the current working directory without path auditing.
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        # Event stream consumed by dagparser.dagtextlines():
        #   b'n' -> (rev, [parent revs])  a node
        #   b'l' -> (rev, label)          a label attached to a node
        def events():
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    # label explicitly listed revisions as "rN"
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # map: changelog rev -> list of tag names pointing at it
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        # Same event protocol as above, plus b'a' annotation events emitted
        # whenever the branch name changes (only with --branches).
        def events():
            b = b"default"
            for r in cl:
                if branches:
                    # extra[b'branch'] from the changelog entry
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    # Render the event stream as wrapped DAG text lines.
    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
    ui.write(b"\n")
633
633
634
634
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    # With -c, -m or --dir the storage is implied by the flag, so the single
    # positional argument is the revision rather than a file.
    if any(opts.get(k) for k in (b'changelog', b'manifest', b'dir')):
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        # Plain FILE form requires an explicit revision.
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    storage = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        # Emit the raw (possibly flag-processed) stored data for the revision.
        ui.write(storage.rawdata(storage.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
650
650
651
651
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # -e/--extended widens the set of date formats accepted by the parser.
    parsed = (
        dateutil.parsedate(date, dateutil.extendeddateformats)
        if opts["extended"]
        else dateutil.parsedate(date)
    )
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        # Show whether the parsed timestamp falls inside the given range.
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
670
670
671
671
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    # Bind frequently used revlog methods to locals for the per-rev loop.
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # Index entry fields used below (per the assignments that follow):
        # e[1] compressed size, e[2] uncompressed size, e[3] delta base rev,
        # e[5]/e[6] parent revs.
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # With generaldelta the base can be any revision; classify it.
            if e[3] == e[5]:
                deltatype = b'p1'
            elif e[3] == e[6]:
                deltatype = b'p2'
            elif e[3] == rev - 1:
                deltatype = b'prev'
            elif e[3] == rev:
                # base == rev means a full snapshot, not a delta
                deltatype = b'base'
            else:
                deltatype = b'other'
        else:
            # Without generaldelta a delta is always against the previous rev.
            if e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        # Sum the compressed sizes of every revision in the delta chain.
        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    # Plain-mode column header; kept aligned with the %-format widths below.
    fm.plain(
        b'    rev  chain# chainlen     prev   delta       '
        b'size    rawsize  chainsize     ratio   lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b'   readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    # chain base rev -> 1-based chain identifier, assigned in rev order
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        # bytes spanned on disk from the chain base to the end of this rev
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # chain of length 1: no previous revision
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            # Simulate a sparse read of the chain and measure the blocks
            # slicechunk would fetch from disk.
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
852
852
853
853
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    # The deprecated --nodates flag, when given, always wins over --dates.
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:

        def keyfunc(item):
            # sort by mtime, then by filename
            path, entry = item
            return entry[3], path

    else:
        keyfunc = None  # sort by filename

    dirstate = repo.dirstate
    for path, entry in sorted(pycompat.iteritems(dirstate), key=keyfunc):
        # entry fields: entry[0] state char, entry[1] mode, entry[2] size,
        # entry[3] mtime (-1 when unset).
        if entry[3] == -1:
            timestr = b'unset               '
        elif nodates:
            timestr = b'set                 '
        else:
            timestr = encoding.strtolocal(
                time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(entry[3]))
            )
        if entry[1] & 0o20000:
            # symlink bit set in the recorded mode
            modestr = b'lnk'
        else:
            modestr = b'%3o' % (entry[1] & 0o777 & ~util.umask)
        ui.write(
            b"%c %s %10d %s%s\n"
            % (entry[0], modestr, entry[2], timestr, path)
        )
    for f in dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (dirstate.copied(f), f))
897
897
898
898
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
    ]
    + cmdutil.remoteopts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    # Both branches below define doit(pushedrevs, remoteheads) returning
    # (common nodes, remote heads).
    if opts.get(b'old'):

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            # reduce the common set to its heads
            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            # The unused "anything incoming" flag used to be bound to the
            # name 'any', shadowing the builtin; keep it underscored instead.
            common, _anyincoming, hds = setdiscovery.findcommonheads(
                ui, repo, remote, ancestorsof=nodes
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']
    with util.timedcm('debug-discovery') as t:
        common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    common = set(common)
    rheads = set(hds)
    lheads = set(repo.heads())

    data = {}
    data[b'elapsed'] = t.elapsed
    data[b'nb-common'] = len(common)
    data[b'nb-common-local'] = len(common & lheads)
    data[b'nb-common-remote'] = len(common & rheads)
    data[b'nb-common-both'] = len(common & rheads & lheads)
    data[b'nb-local'] = len(lheads)
    data[b'nb-local-missing'] = data[b'nb-local'] - data[b'nb-common-local']
    data[b'nb-remote'] = len(rheads)
    data[b'nb-remote-unknown'] = data[b'nb-remote'] - data[b'nb-common-remote']
    data[b'nb-revs'] = len(repo.revs(b'all()'))
    data[b'nb-revs-common'] = len(repo.revs(b'::%ln', common))
    data[b'nb-revs-missing'] = data[b'nb-revs'] - data[b'nb-revs-common']

    # display discovery summary
    ui.writenoi18n(b"elapsed time:  %(elapsed)f seconds\n" % data)
    ui.writenoi18n(b"heads summary:\n")
    ui.writenoi18n(b"  total common heads:  %(nb-common)9d\n" % data)
    ui.writenoi18n(b"    also local heads:  %(nb-common-local)9d\n" % data)
    ui.writenoi18n(b"    also remote heads: %(nb-common-remote)9d\n" % data)
    ui.writenoi18n(b"    both:              %(nb-common-both)9d\n" % data)
    ui.writenoi18n(b"  local heads:         %(nb-local)9d\n" % data)
    ui.writenoi18n(b"    common:            %(nb-common-local)9d\n" % data)
    ui.writenoi18n(b"    missing:           %(nb-local-missing)9d\n" % data)
    ui.writenoi18n(b"  remote heads:        %(nb-remote)9d\n" % data)
    ui.writenoi18n(b"    common:            %(nb-common-remote)9d\n" % data)
    ui.writenoi18n(b"    unknown:           %(nb-remote-unknown)9d\n" % data)
    ui.writenoi18n(b"local changesets:      %(nb-revs)9d\n" % data)
    ui.writenoi18n(b"  common:              %(nb-revs-common)9d\n" % data)
    ui.writenoi18n(b"  missing:             %(nb-revs-missing)9d\n" % data)

    if ui.verbose:
        ui.writenoi18n(
            b"common heads: %s\n" % b" ".join(sorted(short(n) for n in common))
        )
1003
1003
1004
1004
# Streaming buffer size (4 KiB) used by debugdownload below.
_chunksize = 4 << 10
1006
1006
1007
1007
@command(
    b'debugdownload', [(b'o', b'output', b'', _(b'path')),], optionalrepo=True
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config
    """
    fh = urlmod.open(ui, url, output)

    try:
        # Default to writing to the ui; with -o, stream to the given file.
        dest = ui
        if output:
            dest = open(output, b"wb", _chunksize)
        try:
            # Copy in fixed-size chunks to bound memory use.
            data = fh.read(_chunksize)
            while data:
                dest.write(data)
                data = fh.read(_chunksize)
        finally:
            if output:
                dest.close()
    finally:
        # Previously the URL handle was leaked; always release it.
        fh.close()
1027
1027
1028
1028
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        # Locate the extension on disk; frozen (oxidized) builds have no
        # __file__, so fall back to the executable path there.
        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            fm.write(b'name', b'%s', extname)
            # Annotate the name with testing status against this hg version.
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b'  location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b'  bundled: %s\n') % [b'no', b'yes'][isinternal])
            fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b'  tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b'  bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1090
1090
1091
1091
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification

    Runs ``expr`` through the fileset language pipeline (parse ->
    analyze -> optimize), optionally printing the tree after the stages
    selected with ``--show-stage``, then evaluates the resulting matcher
    against the files of the chosen revision (or of all revisions plus
    the working directory with ``--all-files``) and prints every match.
    '''
    # imported here (not at module level) to keep startup cheap
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    # rev defaults to b'' -> revsingle falls back to None (working directory)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)

    # pipeline stages in application order; the names double as the
    # identifiers accepted by --show-stage
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        # validate requested stage names before doing any work
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            # the bare --verbose (deprecated) form prints no stage header
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # collect the candidate file names the matcher will be tested against
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        # working directory: include unknown and ignored files too, so
        # filesets referencing them can be exercised
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    # --show-matcher defaults to None, meaning "only with --verbose"
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1187
1187
1188
1188
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # first column width: widest variant name, but never narrower than
    # the b'format-variant' header itself
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # pad the variant name out to the fixed first-column width
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # plain output: strings pass through, booleans become yes/no
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        # structured formatters (json, templates) keep the raw value
        formatvalue = pycompat.identity

    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # choose color labels reflecting whether the repository state
        # matches the current config and/or the Mercurial default
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        # the config and default columns only appear with --verbose
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1259
1259
1260
1260
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def _yesno(flag):
        # render a boolean probe result exactly as the historical output does
        return b'yes' if flag else b'no'

    def _orunknown(value):
        # fall back to the literal placeholder when detection yields nothing
        return value or b'(unknown)'

    ui.writenoi18n(b'path: %s\n' % path)
    ui.writenoi18n(b'mounted on: %s\n' % _orunknown(util.getfsmountpoint(path)))
    ui.writenoi18n(b'exec: %s\n' % _yesno(util.checkexec(path)))
    ui.writenoi18n(b'fstype: %s\n' % _orunknown(util.getfstype(path)))
    ui.writenoi18n(b'symlink: %s\n' % _yesno(util.checklink(path)))
    ui.writenoi18n(b'hardlink: %s\n' % _yesno(util.checknlink(path)))
    casesensitive = b'(unknown)'
    try:
        # case-sensitivity is probed by creating a scratch file in the
        # target directory; failure to create it leaves b'(unknown)'
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = _yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1283
1283
1284
1284
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    # wire-protocol arguments use native-str keys (passed via **args below)
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = repo.getbundle(b'debug', **args)

    # map the user-facing compression name onto the on-disk bundle type
    bundletype = opts.get(b'type', b'bzip2').lower()
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = btypes.get(bundletype)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1331
1331
1332
1332
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != b'.':
                # a file is ignored either directly or because one of its
                # parent directories matches an ignore pattern
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    # the match came from a containing directory, not the
                    # file itself
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                # report which ignore file and line produced the match
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1381
1381
1382
1382
@command(
    b'debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    # full hashes with --debug, short (12-char) node ids otherwise
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # measure the rendered id width from the first revision, if any,
    # so the header columns line up with the rows
    idlen = 12
    for i in store:
        idlen = len(shortfn(store.node(i)))
        break

    fm = ui.formatter(b'debugindex', opts)
    fm.plain(
        b'   rev linkrev %s %s p2\n'
        % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
    )

    for rev in store:
        node = store.node(rev)
        parents = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
        fm.write(b'node', b'%s ', shortfn(node))
        fm.write(b'p1', b'%s ', shortfn(parents[0]))
        fm.write(b'p2', b'%s', shortfn(parents[1]))
        fm.plain(b'\n')

    fm.end()
1422
1422
1423
1423
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    store = cmdutil.openstorage(
        repo, b'debugindexdot', file_, pycompat.byteskwargs(opts)
    )
    ui.writenoi18n(b"digraph G {\n")
    for rev in store:
        p1, p2 = store.parents(store.node(rev))
        # always emit the first-parent edge
        ui.write(b"\t%d -> %d\n" % (store.rev(p1), rev))
        # merge revisions get a second edge; nullid marks "no parent"
        if p2 != nullid:
            ui.write(b"\t%d -> %d\n" % (store.rev(p2), rev))
    ui.write(b"}\n")
1442
1442
1443
1443
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # poke the index first so it is fully loaded before stats are queried
    repo.changelog.shortest(nullid, 1)
    index = repo.changelog.index
    # only the native (C/Rust) index implementation exposes stats()
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    stats = index.stats()
    for key in sorted(stats):
        ui.write(b'%s: %d\n' % (key, stats[key]))
1453
1453
1454
1454
1455 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1455 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1456 def debuginstall(ui, **opts):
1456 def debuginstall(ui, **opts):
1457 '''test Mercurial installation
1457 '''test Mercurial installation
1458
1458
1459 Returns 0 on success.
1459 Returns 0 on success.
1460 '''
1460 '''
1461 opts = pycompat.byteskwargs(opts)
1461 opts = pycompat.byteskwargs(opts)
1462
1462
1463 problems = 0
1463 problems = 0
1464
1464
1465 fm = ui.formatter(b'debuginstall', opts)
1465 fm = ui.formatter(b'debuginstall', opts)
1466 fm.startitem()
1466 fm.startitem()
1467
1467
1468 # encoding
1468 # encoding
1469 fm.write(b'encoding', _(b"checking encoding (%s)...\n"), encoding.encoding)
1469 fm.write(b'encoding', _(b"checking encoding (%s)...\n"), encoding.encoding)
1470 err = None
1470 err = None
1471 try:
1471 try:
1472 codecs.lookup(pycompat.sysstr(encoding.encoding))
1472 codecs.lookup(pycompat.sysstr(encoding.encoding))
1473 except LookupError as inst:
1473 except LookupError as inst:
1474 err = stringutil.forcebytestr(inst)
1474 err = stringutil.forcebytestr(inst)
1475 problems += 1
1475 problems += 1
1476 fm.condwrite(
1476 fm.condwrite(
1477 err,
1477 err,
1478 b'encodingerror',
1478 b'encodingerror',
1479 _(b" %s\n (check that your locale is properly set)\n"),
1479 _(b" %s\n (check that your locale is properly set)\n"),
1480 err,
1480 err,
1481 )
1481 )
1482
1482
1483 # Python
1483 # Python
1484 pythonlib = None
1484 pythonlib = None
1485 if util.safehasattr(os, '__file__'):
1485 if util.safehasattr(os, '__file__'):
1486 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1486 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1487 elif getattr(sys, 'oxidized', False):
1487 elif getattr(sys, 'oxidized', False):
1488 pythonlib = pycompat.sysexecutable
1488 pythonlib = pycompat.sysexecutable
1489
1489
1490 fm.write(
1490 fm.write(
1491 b'pythonexe',
1491 b'pythonexe',
1492 _(b"checking Python executable (%s)\n"),
1492 _(b"checking Python executable (%s)\n"),
1493 pycompat.sysexecutable or _(b"unknown"),
1493 pycompat.sysexecutable or _(b"unknown"),
1494 )
1494 )
1495 fm.write(
1495 fm.write(
1496 b'pythonimplementation',
1496 b'pythonimplementation',
1497 _(b"checking Python implementation (%s)\n"),
1497 _(b"checking Python implementation (%s)\n"),
1498 pycompat.sysbytes(platform.python_implementation()),
1498 pycompat.sysbytes(platform.python_implementation()),
1499 )
1499 )
1500 fm.write(
1500 fm.write(
1501 b'pythonver',
1501 b'pythonver',
1502 _(b"checking Python version (%s)\n"),
1502 _(b"checking Python version (%s)\n"),
1503 (b"%d.%d.%d" % sys.version_info[:3]),
1503 (b"%d.%d.%d" % sys.version_info[:3]),
1504 )
1504 )
1505 fm.write(
1505 fm.write(
1506 b'pythonlib',
1506 b'pythonlib',
1507 _(b"checking Python lib (%s)...\n"),
1507 _(b"checking Python lib (%s)...\n"),
1508 pythonlib or _(b"unknown"),
1508 pythonlib or _(b"unknown"),
1509 )
1509 )
1510
1510
1511 try:
1511 try:
1512 from . import rustext
1512 from . import rustext
1513
1513
1514 rustext.__doc__ # trigger lazy import
1514 rustext.__doc__ # trigger lazy import
1515 except ImportError:
1515 except ImportError:
1516 rustext = None
1516 rustext = None
1517
1517
1518 security = set(sslutil.supportedprotocols)
1518 security = set(sslutil.supportedprotocols)
1519 if sslutil.hassni:
1519 if sslutil.hassni:
1520 security.add(b'sni')
1520 security.add(b'sni')
1521
1521
1522 fm.write(
1522 fm.write(
1523 b'pythonsecurity',
1523 b'pythonsecurity',
1524 _(b"checking Python security support (%s)\n"),
1524 _(b"checking Python security support (%s)\n"),
1525 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1525 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1526 )
1526 )
1527
1527
1528 # These are warnings, not errors. So don't increment problem count. This
1528 # These are warnings, not errors. So don't increment problem count. This
1529 # may change in the future.
1529 # may change in the future.
1530 if b'tls1.2' not in security:
1530 if b'tls1.2' not in security:
1531 fm.plain(
1531 fm.plain(
1532 _(
1532 _(
1533 b' TLS 1.2 not supported by Python install; '
1533 b' TLS 1.2 not supported by Python install; '
1534 b'network connections lack modern security\n'
1534 b'network connections lack modern security\n'
1535 )
1535 )
1536 )
1536 )
1537 if b'sni' not in security:
1537 if b'sni' not in security:
1538 fm.plain(
1538 fm.plain(
1539 _(
1539 _(
1540 b' SNI not supported by Python install; may have '
1540 b' SNI not supported by Python install; may have '
1541 b'connectivity issues with some servers\n'
1541 b'connectivity issues with some servers\n'
1542 )
1542 )
1543 )
1543 )
1544
1544
1545 fm.plain(
1545 fm.plain(
1546 _(
1546 _(
1547 b"checking Rust extensions (%s)\n"
1547 b"checking Rust extensions (%s)\n"
1548 % (b'missing' if rustext is None else b'installed')
1548 % (b'missing' if rustext is None else b'installed')
1549 ),
1549 ),
1550 )
1550 )
1551
1551
1552 # TODO print CA cert info
1552 # TODO print CA cert info
1553
1553
1554 # hg version
1554 # hg version
1555 hgver = util.version()
1555 hgver = util.version()
1556 fm.write(
1556 fm.write(
1557 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1557 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1558 )
1558 )
1559 fm.write(
1559 fm.write(
1560 b'hgverextra',
1560 b'hgverextra',
1561 _(b"checking Mercurial custom build (%s)\n"),
1561 _(b"checking Mercurial custom build (%s)\n"),
1562 b'+'.join(hgver.split(b'+')[1:]),
1562 b'+'.join(hgver.split(b'+')[1:]),
1563 )
1563 )
1564
1564
1565 # compiled modules
1565 # compiled modules
1566 hgmodules = None
1566 hgmodules = None
1567 if util.safehasattr(sys.modules[__name__], '__file__'):
1567 if util.safehasattr(sys.modules[__name__], '__file__'):
1568 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1568 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1569 elif getattr(sys, 'oxidized', False):
1569 elif getattr(sys, 'oxidized', False):
1570 hgmodules = pycompat.sysexecutable
1570 hgmodules = pycompat.sysexecutable
1571
1571
1572 fm.write(
1572 fm.write(
1573 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1573 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1574 )
1574 )
1575 fm.write(
1575 fm.write(
1576 b'hgmodules',
1576 b'hgmodules',
1577 _(b"checking installed modules (%s)...\n"),
1577 _(b"checking installed modules (%s)...\n"),
1578 hgmodules or _(b"unknown"),
1578 hgmodules or _(b"unknown"),
1579 )
1579 )
1580
1580
1581 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1581 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1582 rustext = rustandc # for now, that's the only case
1582 rustext = rustandc # for now, that's the only case
1583 cext = policy.policy in (b'c', b'allow') or rustandc
1583 cext = policy.policy in (b'c', b'allow') or rustandc
1584 nopure = cext or rustext
1584 nopure = cext or rustext
1585 if nopure:
1585 if nopure:
1586 err = None
1586 err = None
1587 try:
1587 try:
1588 if cext:
1588 if cext:
1589 from .cext import ( # pytype: disable=import-error
1589 from .cext import ( # pytype: disable=import-error
1590 base85,
1590 base85,
1591 bdiff,
1591 bdiff,
1592 mpatch,
1592 mpatch,
1593 osutil,
1593 osutil,
1594 )
1594 )
1595
1595
1596 # quiet pyflakes
1596 # quiet pyflakes
1597 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1597 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1598 if rustext:
1598 if rustext:
1599 from .rustext import ( # pytype: disable=import-error
1599 from .rustext import ( # pytype: disable=import-error
1600 ancestor,
1600 ancestor,
1601 dirstate,
1601 dirstate,
1602 )
1602 )
1603
1603
1604 dir(ancestor), dir(dirstate) # quiet pyflakes
1604 dir(ancestor), dir(dirstate) # quiet pyflakes
1605 except Exception as inst:
1605 except Exception as inst:
1606 err = stringutil.forcebytestr(inst)
1606 err = stringutil.forcebytestr(inst)
1607 problems += 1
1607 problems += 1
1608 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1608 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1609
1609
1610 compengines = util.compengines._engines.values()
1610 compengines = util.compengines._engines.values()
1611 fm.write(
1611 fm.write(
1612 b'compengines',
1612 b'compengines',
1613 _(b'checking registered compression engines (%s)\n'),
1613 _(b'checking registered compression engines (%s)\n'),
1614 fm.formatlist(
1614 fm.formatlist(
1615 sorted(e.name() for e in compengines),
1615 sorted(e.name() for e in compengines),
1616 name=b'compengine',
1616 name=b'compengine',
1617 fmt=b'%s',
1617 fmt=b'%s',
1618 sep=b', ',
1618 sep=b', ',
1619 ),
1619 ),
1620 )
1620 )
1621 fm.write(
1621 fm.write(
1622 b'compenginesavail',
1622 b'compenginesavail',
1623 _(b'checking available compression engines (%s)\n'),
1623 _(b'checking available compression engines (%s)\n'),
1624 fm.formatlist(
1624 fm.formatlist(
1625 sorted(e.name() for e in compengines if e.available()),
1625 sorted(e.name() for e in compengines if e.available()),
1626 name=b'compengine',
1626 name=b'compengine',
1627 fmt=b'%s',
1627 fmt=b'%s',
1628 sep=b', ',
1628 sep=b', ',
1629 ),
1629 ),
1630 )
1630 )
1631 wirecompengines = compression.compengines.supportedwireengines(
1631 wirecompengines = compression.compengines.supportedwireengines(
1632 compression.SERVERROLE
1632 compression.SERVERROLE
1633 )
1633 )
1634 fm.write(
1634 fm.write(
1635 b'compenginesserver',
1635 b'compenginesserver',
1636 _(
1636 _(
1637 b'checking available compression engines '
1637 b'checking available compression engines '
1638 b'for wire protocol (%s)\n'
1638 b'for wire protocol (%s)\n'
1639 ),
1639 ),
1640 fm.formatlist(
1640 fm.formatlist(
1641 [e.name() for e in wirecompengines if e.wireprotosupport()],
1641 [e.name() for e in wirecompengines if e.wireprotosupport()],
1642 name=b'compengine',
1642 name=b'compengine',
1643 fmt=b'%s',
1643 fmt=b'%s',
1644 sep=b', ',
1644 sep=b', ',
1645 ),
1645 ),
1646 )
1646 )
1647 re2 = b'missing'
1647 re2 = b'missing'
1648 if util._re2:
1648 if util._re2:
1649 re2 = b'available'
1649 re2 = b'available'
1650 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1650 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1651 fm.data(re2=bool(util._re2))
1651 fm.data(re2=bool(util._re2))
1652
1652
1653 rust_debug_mod = policy.importrust("debug")
1653 rust_debug_mod = policy.importrust("debug")
1654 if rust_debug_mod is not None:
1654 if rust_debug_mod is not None:
1655 re2_rust = b'installed' if rust_debug_mod.re2_installed else b'missing'
1655 re2_rust = b'installed' if rust_debug_mod.re2_installed else b'missing'
1656
1656
1657 msg = b'checking "re2" regexp engine Rust bindings (%s)\n'
1657 msg = b'checking "re2" regexp engine Rust bindings (%s)\n'
1658 fm.plain(_(msg % re2_rust))
1658 fm.plain(_(msg % re2_rust))
1659
1659
1660 # templates
1660 # templates
1661 p = templater.templatepaths()
1661 p = templater.templatepaths()
1662 fm.write(b'templatedirs', b'checking templates (%s)...\n', b' '.join(p))
1662 fm.write(b'templatedirs', b'checking templates (%s)...\n', b' '.join(p))
1663 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1663 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1664 if p:
1664 if p:
1665 m = templater.templatepath(b"map-cmdline.default")
1665 m = templater.templatepath(b"map-cmdline.default")
1666 if m:
1666 if m:
1667 # template found, check if it is working
1667 # template found, check if it is working
1668 err = None
1668 err = None
1669 try:
1669 try:
1670 templater.templater.frommapfile(m)
1670 templater.templater.frommapfile(m)
1671 except Exception as inst:
1671 except Exception as inst:
1672 err = stringutil.forcebytestr(inst)
1672 err = stringutil.forcebytestr(inst)
1673 p = None
1673 p = None
1674 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1674 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1675 else:
1675 else:
1676 p = None
1676 p = None
1677 fm.condwrite(
1677 fm.condwrite(
1678 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1678 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1679 )
1679 )
1680 fm.condwrite(
1680 fm.condwrite(
1681 not m,
1681 not m,
1682 b'defaulttemplatenotfound',
1682 b'defaulttemplatenotfound',
1683 _(b" template '%s' not found\n"),
1683 _(b" template '%s' not found\n"),
1684 b"default",
1684 b"default",
1685 )
1685 )
1686 if not p:
1686 if not p:
1687 problems += 1
1687 problems += 1
1688 fm.condwrite(
1688 fm.condwrite(
1689 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
1689 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
1690 )
1690 )
1691
1691
1692 # editor
1692 # editor
1693 editor = ui.geteditor()
1693 editor = ui.geteditor()
1694 editor = util.expandpath(editor)
1694 editor = util.expandpath(editor)
1695 editorbin = procutil.shellsplit(editor)[0]
1695 editorbin = procutil.shellsplit(editor)[0]
1696 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
1696 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
1697 cmdpath = procutil.findexe(editorbin)
1697 cmdpath = procutil.findexe(editorbin)
1698 fm.condwrite(
1698 fm.condwrite(
1699 not cmdpath and editor == b'vi',
1699 not cmdpath and editor == b'vi',
1700 b'vinotfound',
1700 b'vinotfound',
1701 _(
1701 _(
1702 b" No commit editor set and can't find %s in PATH\n"
1702 b" No commit editor set and can't find %s in PATH\n"
1703 b" (specify a commit editor in your configuration"
1703 b" (specify a commit editor in your configuration"
1704 b" file)\n"
1704 b" file)\n"
1705 ),
1705 ),
1706 not cmdpath and editor == b'vi' and editorbin,
1706 not cmdpath and editor == b'vi' and editorbin,
1707 )
1707 )
1708 fm.condwrite(
1708 fm.condwrite(
1709 not cmdpath and editor != b'vi',
1709 not cmdpath and editor != b'vi',
1710 b'editornotfound',
1710 b'editornotfound',
1711 _(
1711 _(
1712 b" Can't find editor '%s' in PATH\n"
1712 b" Can't find editor '%s' in PATH\n"
1713 b" (specify a commit editor in your configuration"
1713 b" (specify a commit editor in your configuration"
1714 b" file)\n"
1714 b" file)\n"
1715 ),
1715 ),
1716 not cmdpath and editorbin,
1716 not cmdpath and editorbin,
1717 )
1717 )
1718 if not cmdpath and editor != b'vi':
1718 if not cmdpath and editor != b'vi':
1719 problems += 1
1719 problems += 1
1720
1720
1721 # check username
1721 # check username
1722 username = None
1722 username = None
1723 err = None
1723 err = None
1724 try:
1724 try:
1725 username = ui.username()
1725 username = ui.username()
1726 except error.Abort as e:
1726 except error.Abort as e:
1727 err = stringutil.forcebytestr(e)
1727 err = stringutil.forcebytestr(e)
1728 problems += 1
1728 problems += 1
1729
1729
1730 fm.condwrite(
1730 fm.condwrite(
1731 username, b'username', _(b"checking username (%s)\n"), username
1731 username, b'username', _(b"checking username (%s)\n"), username
1732 )
1732 )
1733 fm.condwrite(
1733 fm.condwrite(
1734 err,
1734 err,
1735 b'usernameerror',
1735 b'usernameerror',
1736 _(
1736 _(
1737 b"checking username...\n %s\n"
1737 b"checking username...\n %s\n"
1738 b" (specify a username in your configuration file)\n"
1738 b" (specify a username in your configuration file)\n"
1739 ),
1739 ),
1740 err,
1740 err,
1741 )
1741 )
1742
1742
1743 for name, mod in extensions.extensions():
1743 for name, mod in extensions.extensions():
1744 handler = getattr(mod, 'debuginstall', None)
1744 handler = getattr(mod, 'debuginstall', None)
1745 if handler is not None:
1745 if handler is not None:
1746 problems += handler(ui, fm)
1746 problems += handler(ui, fm)
1747
1747
1748 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
1748 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
1749 if not problems:
1749 if not problems:
1750 fm.data(problems=problems)
1750 fm.data(problems=problems)
1751 fm.condwrite(
1751 fm.condwrite(
1752 problems,
1752 problems,
1753 b'problems',
1753 b'problems',
1754 _(b"%d problems detected, please check your install!\n"),
1754 _(b"%d problems detected, please check your install!\n"),
1755 problems,
1755 problems,
1756 )
1756 )
1757 fm.end()
1757 fm.end()
1758
1758
1759 return problems
1759 return problems
1760
1760
1761
1761
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    # Single round-trip: the peer answers with one boolean per queried node.
    nodes = [bin(s) for s in ids]
    answer = peer.known(nodes)
    ui.write(b"%s\n" % b"".join(b"1" if known else b"0" for known in answer))
1775
1775
1776
1776
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # Kept only so old shell-completion scripts keep working; the actual
    # implementation lives in debugnamecomplete.
    debugnamecomplete(ui, repo, *args)
1781
1781
1782
1782
@command(
    b'debuglocks',
    [
        (b'L', b'force-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # Forcibly freeing a lock is just deleting its file; DANGEROUS because
    # another process may legitimately hold it.
    if opts.get('force_lock'):
        repo.svfs.unlink(b'lock')
    if opts.get('force_wlock'):
        repo.vfs.unlink(b'wlock')
    if opts.get('force_lock') or opts.get('force_wlock'):
        return 0

    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                # wait=False: fail immediately instead of blocking if held
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            # Hold the acquired lock(s) until the user answers the prompt
            # (or the process is interrupted).
            ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        # Always release whatever we managed to acquire, even on Abort.
        release(*locks)

    # Reporting mode: describe the current state of each lock.
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # We acquired it, so nobody else held it; release immediately.
            l.release()
        else:
            # Lock is held by someone else: report owner, age and host.
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise
        # No lock file present (or we grabbed and released it ourselves).
        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
1894
1894
1895
1895
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # Not every revlog implementation carries a fulltext cache; abort
        # with a clear message instead of an AttributeError traceback.
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        # wlock: the cache is persisted in the working directory's store.
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
        return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(e, hint=b"Check your manifest node id")
                manifest.read()  # stores revision in cache too
        return

    # Default: display the cache contents and size statistics.
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.peek to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
1967
1967
1968
1968
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        ms = mergemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # Default human-readable template; note the doubled backslashes
        # produce literal \" in the rendered output.
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
        )

    ms = mergemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # "commits": the two sides of the merge (local/other) and their labels.
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # "files": one entry per tracked merge conflict, decoded from the
    # positional record tuple in ms._state (layout depends on record type).
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergemod.MERGE_RECORD_UNRESOLVED,
                mergemod.MERGE_RECORD_RESOLVED,
            ):
                # Content conflict record: local/ancestor/other descriptors.
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                # Path (rename/delete) conflict record.
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in ms.extras(f).items():
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    fm.end()
2063
2063
2064
2064
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    names = set()
    # The 'branches' namespace is handled separately below so that only
    # open branches are offered, as this command historically did.
    for nsname, ns in pycompat.iteritems(repo.names):
        if nsname != b'branches':
            names.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            names.add(tag)

    # No arguments means "complete everything" (empty prefix).
    prefixes = args or [b'']
    completions = set()
    for prefix in prefixes:
        completions.update(n for n in names if n.startswith(prefix))
    ui.write(b'\n'.join(sorted(completions)))
    ui.write(b'\n')
2087
2087
2088
2088
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            # fixed: data is written to stdout (via ui.write), not stdin
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            # fixed doubled word ("data on disk data")
            _(b'check that the data on disk are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap
    """
    if opts['dump_new']:
        # Serialize a fresh nodemap from the changelog index and emit it.
        unfi = repo.unfiltered()
        cl = unfi.changelog
        if util.safehasattr(cl.index, "nodemap_data_all"):
            # Rust/C index can serialize itself directly.
            data = cl.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(cl.index)
        ui.write(data)
    elif opts['dump_disk']:
        # Emit the raw persisted nodemap bytes, if any exist on disk.
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        # Validate the on-disk data against the live changelog index.
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, cl.index, data)
    elif opts['metadata']:
        # Display the docket (metadata header) of the persisted nodemap.
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            # NOTE(review): assumes data_length > 0 for any persisted
            # nodemap — confirm a zero-length docket cannot occur here.
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2151
2151
2152
2152
2153 @command(
2153 @command(
2154 b'debugobsolete',
2154 b'debugobsolete',
2155 [
2155 [
2156 (b'', b'flags', 0, _(b'markers flag')),
2156 (b'', b'flags', 0, _(b'markers flag')),
2157 (
2157 (
2158 b'',
2158 b'',
2159 b'record-parents',
2159 b'record-parents',
2160 False,
2160 False,
2161 _(b'record parent information for the precursor'),
2161 _(b'record parent information for the precursor'),
2162 ),
2162 ),
2163 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2163 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2164 (
2164 (
2165 b'',
2165 b'',
2166 b'exclusive',
2166 b'exclusive',
2167 False,
2167 False,
2168 _(b'restrict display to markers only relevant to REV'),
2168 _(b'restrict display to markers only relevant to REV'),
2169 ),
2169 ),
2170 (b'', b'index', False, _(b'display index of the marker')),
2170 (b'', b'index', False, _(b'display index of the marker')),
2171 (b'', b'delete', [], _(b'delete markers specified by indices')),
2171 (b'', b'delete', [], _(b'delete markers specified by indices')),
2172 ]
2172 ]
2173 + cmdutil.commitopts2
2173 + cmdutil.commitopts2
2174 + cmdutil.formatteropts,
2174 + cmdutil.formatteropts,
2175 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2175 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2176 )
2176 )
2177 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2177 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2178 """create arbitrary obsolete marker
2178 """create arbitrary obsolete marker
2179
2179
2180 With no arguments, displays the list of obsolescence markers."""
2180 With no arguments, displays the list of obsolescence markers."""
2181
2181
2182 opts = pycompat.byteskwargs(opts)
2182 opts = pycompat.byteskwargs(opts)
2183
2183
2184 def parsenodeid(s):
2184 def parsenodeid(s):
2185 try:
2185 try:
2186 # We do not use revsingle/revrange functions here to accept
2186 # We do not use revsingle/revrange functions here to accept
2187 # arbitrary node identifiers, possibly not present in the
2187 # arbitrary node identifiers, possibly not present in the
2188 # local repository.
2188 # local repository.
2189 n = bin(s)
2189 n = bin(s)
2190 if len(n) != len(nullid):
2190 if len(n) != len(nullid):
2191 raise TypeError()
2191 raise TypeError()
2192 return n
2192 return n
2193 except TypeError:
2193 except TypeError:
2194 raise error.Abort(
2194 raise error.Abort(
2195 b'changeset references must be full hexadecimal '
2195 b'changeset references must be full hexadecimal '
2196 b'node identifiers'
2196 b'node identifiers'
2197 )
2197 )
2198
2198
2199 if opts.get(b'delete'):
2199 if opts.get(b'delete'):
2200 indices = []
2200 indices = []
2201 for v in opts.get(b'delete'):
2201 for v in opts.get(b'delete'):
2202 try:
2202 try:
2203 indices.append(int(v))
2203 indices.append(int(v))
2204 except ValueError:
2204 except ValueError:
2205 raise error.Abort(
2205 raise error.Abort(
2206 _(b'invalid index value: %r') % v,
2206 _(b'invalid index value: %r') % v,
2207 hint=_(b'use integers for indices'),
2207 hint=_(b'use integers for indices'),
2208 )
2208 )
2209
2209
2210 if repo.currenttransaction():
2210 if repo.currenttransaction():
2211 raise error.Abort(
2211 raise error.Abort(
2212 _(b'cannot delete obsmarkers in the middle of transaction.')
2212 _(b'cannot delete obsmarkers in the middle of transaction.')
2213 )
2213 )
2214
2214
2215 with repo.lock():
2215 with repo.lock():
2216 n = repair.deleteobsmarkers(repo.obsstore, indices)
2216 n = repair.deleteobsmarkers(repo.obsstore, indices)
2217 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2217 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2218
2218
2219 return
2219 return
2220
2220
2221 if precursor is not None:
2221 if precursor is not None:
2222 if opts[b'rev']:
2222 if opts[b'rev']:
2223 raise error.Abort(b'cannot select revision when creating marker')
2223 raise error.Abort(b'cannot select revision when creating marker')
2224 metadata = {}
2224 metadata = {}
2225 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2225 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2226 succs = tuple(parsenodeid(succ) for succ in successors)
2226 succs = tuple(parsenodeid(succ) for succ in successors)
2227 l = repo.lock()
2227 l = repo.lock()
2228 try:
2228 try:
2229 tr = repo.transaction(b'debugobsolete')
2229 tr = repo.transaction(b'debugobsolete')
2230 try:
2230 try:
2231 date = opts.get(b'date')
2231 date = opts.get(b'date')
2232 if date:
2232 if date:
2233 date = dateutil.parsedate(date)
2233 date = dateutil.parsedate(date)
2234 else:
2234 else:
2235 date = None
2235 date = None
2236 prec = parsenodeid(precursor)
2236 prec = parsenodeid(precursor)
2237 parents = None
2237 parents = None
2238 if opts[b'record_parents']:
2238 if opts[b'record_parents']:
2239 if prec not in repo.unfiltered():
2239 if prec not in repo.unfiltered():
2240 raise error.Abort(
2240 raise error.Abort(
2241 b'cannot used --record-parents on '
2241 b'cannot used --record-parents on '
2242 b'unknown changesets'
2242 b'unknown changesets'
2243 )
2243 )
2244 parents = repo.unfiltered()[prec].parents()
2244 parents = repo.unfiltered()[prec].parents()
2245 parents = tuple(p.node() for p in parents)
2245 parents = tuple(p.node() for p in parents)
2246 repo.obsstore.create(
2246 repo.obsstore.create(
2247 tr,
2247 tr,
2248 prec,
2248 prec,
2249 succs,
2249 succs,
2250 opts[b'flags'],
2250 opts[b'flags'],
2251 parents=parents,
2251 parents=parents,
2252 date=date,
2252 date=date,
2253 metadata=metadata,
2253 metadata=metadata,
2254 ui=ui,
2254 ui=ui,
2255 )
2255 )
2256 tr.close()
2256 tr.close()
2257 except ValueError as exc:
2257 except ValueError as exc:
2258 raise error.Abort(
2258 raise error.Abort(
2259 _(b'bad obsmarker input: %s') % pycompat.bytestr(exc)
2259 _(b'bad obsmarker input: %s') % pycompat.bytestr(exc)
2260 )
2260 )
2261 finally:
2261 finally:
2262 tr.release()
2262 tr.release()
2263 finally:
2263 finally:
2264 l.release()
2264 l.release()
2265 else:
2265 else:
2266 if opts[b'rev']:
2266 if opts[b'rev']:
2267 revs = scmutil.revrange(repo, opts[b'rev'])
2267 revs = scmutil.revrange(repo, opts[b'rev'])
2268 nodes = [repo[r].node() for r in revs]
2268 nodes = [repo[r].node() for r in revs]
2269 markers = list(
2269 markers = list(
2270 obsutil.getmarkers(
2270 obsutil.getmarkers(
2271 repo, nodes=nodes, exclusive=opts[b'exclusive']
2271 repo, nodes=nodes, exclusive=opts[b'exclusive']
2272 )
2272 )
2273 )
2273 )
2274 markers.sort(key=lambda x: x._data)
2274 markers.sort(key=lambda x: x._data)
2275 else:
2275 else:
2276 markers = obsutil.getmarkers(repo)
2276 markers = obsutil.getmarkers(repo)
2277
2277
2278 markerstoiter = markers
2278 markerstoiter = markers
2279 isrelevant = lambda m: True
2279 isrelevant = lambda m: True
2280 if opts.get(b'rev') and opts.get(b'index'):
2280 if opts.get(b'rev') and opts.get(b'index'):
2281 markerstoiter = obsutil.getmarkers(repo)
2281 markerstoiter = obsutil.getmarkers(repo)
2282 markerset = set(markers)
2282 markerset = set(markers)
2283 isrelevant = lambda m: m in markerset
2283 isrelevant = lambda m: m in markerset
2284
2284
2285 fm = ui.formatter(b'debugobsolete', opts)
2285 fm = ui.formatter(b'debugobsolete', opts)
2286 for i, m in enumerate(markerstoiter):
2286 for i, m in enumerate(markerstoiter):
2287 if not isrelevant(m):
2287 if not isrelevant(m):
2288 # marker can be irrelevant when we're iterating over a set
2288 # marker can be irrelevant when we're iterating over a set
2289 # of markers (markerstoiter) which is bigger than the set
2289 # of markers (markerstoiter) which is bigger than the set
2290 # of markers we want to display (markers)
2290 # of markers we want to display (markers)
2291 # this can happen if both --index and --rev options are
2291 # this can happen if both --index and --rev options are
2292 # provided and thus we need to iterate over all of the markers
2292 # provided and thus we need to iterate over all of the markers
2293 # to get the correct indices, but only display the ones that
2293 # to get the correct indices, but only display the ones that
2294 # are relevant to --rev value
2294 # are relevant to --rev value
2295 continue
2295 continue
2296 fm.startitem()
2296 fm.startitem()
2297 ind = i if opts.get(b'index') else None
2297 ind = i if opts.get(b'index') else None
2298 cmdutil.showmarker(fm, m, index=ind)
2298 cmdutil.showmarker(fm, m, index=ind)
2299 fm.end()
2299 fm.end()
2300
2300
2301
2301
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    opts = pycompat.byteskwargs(opts)
    # Default to the working-directory context when no revision is given.
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    # Emit one "source -> destination" line per copy recorded against p1.
    for destination, source in ctx.p1copies().items():
        ui.write(b'%s -> %s\n' % (source, destination))
2314
2314
2315
2315
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""

    # BUG FIX: this function was previously (mis)named ``debugp1copies``,
    # which silently shadowed the real debugp1copies module attribute
    # defined just above at import time. The command registered by the
    # decorator (b'debugp2copies') was unaffected, but the module-level
    # name was clobbered. Renaming the def to match the command fixes it.
    opts = pycompat.byteskwargs(opts)
    # Default to the working-directory context when no revision is given.
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2328
2328
2329
2329
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        # Resolve the user-supplied path to a repo-relative prefix; paths
        # pointing outside the repository complete to nothing.
        dirstate = repo.dirstate
        wanted = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if wanted != repo.root and not wanted.startswith(rootdir):
            return [], []
        if os.path.isdir(wanted):
            wanted += b'/'
        wanted = wanted[len(rootdir) :]
        # dirstate keys always use '/'; translate OS separators if needed.
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            wanted = wanted.replace(pycompat.ossep, b'/')
        prefixlen = len(wanted)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for name, entry in pycompat.iteritems(dirstate):
            # entry[0] is the dirstate status character (n/m/a/r).
            if not (name.startswith(wanted) and entry[0] in acceptable):
                continue
            if fixpaths:
                name = name.replace(b'/', pycompat.ossep)
            if fullpaths:
                addfile(name)
                continue
            # Without --full, stop at the next path separator and offer
            # the directory instead of each file beneath it.
            sep = name.find(pycompat.ossep, prefixlen)
            if sep >= 0:
                adddir(name[:sep])
            else:
                addfile(name)
        return files, dirs

    # Build the set of acceptable dirstate states from the flags; when no
    # flag is given, accept everything ('nmar').
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        matched, subdirs = complete(spec, acceptable or b'nmar')
        files.update(matched)
        dirs.update(subdirs)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2398
2398
2399
2399
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    ctx1 = scmutil.revsingle(repo, rev1)
    ctx2 = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(ctx1, pats, opts)
    # Sort for stable, diff-friendly output.
    pathcopies = copies.pathcopies(ctx1, ctx2, matcher)
    for dst, src in sorted(pathcopies.items()):
        ui.write(b'%s -> %s\n' % (src, dst))
2413
2413
2414
2414
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    overrides = {
        (b'devel', b'debug.peer-request'): True,
    }

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

        islocal = peer.local() is not None
        pushable = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % (_(b'yes') if islocal else _(b'no')))
        ui.write(_(b'pushable: %s\n') % (_(b'yes') if pushable else _(b'no')))
2433
2433
2434
2434
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    # --tool wins over everything else; surface it (and the other
    # high-priority knobs below) so the user can see why a tool was picked.
    overrides = {}
    if opts[b'tool']:
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        matcher = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(matcher):
            fctx = ctx[path]
            try:
                # Without --debug, buffer (and discard) the chatter
                # _picktool emits while matching merge-patterns.
                if not ui.debugflag:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(b'%s = %s\n' % (path, tool))
2522
2522
2523
2523
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    target = hg.peer(ui, {}, repopath)
    if not keyinfo:
        # Listing mode: dump every key/value pair in the namespace,
        # escaped so binary-ish values stay printable.
        for key, value in sorted(pycompat.iteritems(target.listkeys(namespace))):
            ui.write(
                b"%s\t%s\n"
                % (stringutil.escapestr(key), stringutil.escapestr(value))
            )
        return

    # Update mode: conditionally set key to new iff it currently is old.
    key, old, new = keyinfo
    with target.commandexecutor() as e:
        r = e.callcommand(
            b'pushkey',
            {
                b'namespace': namespace,
                b'key': key,
                b'old': old,
                b'new': new,
            },
        ).result()

    ui.status(pycompat.bytestr(r) + b'\n')
    # Exit status 0 on success (truthy r), 1 on failure.
    return not r
2555
2555
2556
2556
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    # Compare the parent vectors of two revisions and report their relation.
    ctxa = scmutil.revsingle(repo, a)
    ctxb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ctxa)
    pb = pvec.ctxpvec(ctxb)
    # NOTE(review): assumes every pair of pvecs satisfies one of the four
    # relations below; if none held, `rel` would be unbound when printed —
    # confirm against the pvec module's comparison semantics.
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
2583
2583
2584
2584
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        # None means "rebuild everything"; --minimal narrows it below.
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            inmanifest = set(ctx.manifest().keys())
            indirstate = set(dirstate)
            # Files the manifest knows but the dirstate does not...
            manifestonly = inmanifest - indirstate
            # ...plus dirstate-only files that are not pending adds.
            dsonly = indirstate - inmanifest
            dsnotadded = {f for f in dsonly if dirstate[f] != b'a'}
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2632
2632
2633
2633
@command(b'debugrebuildfncache', [], b'')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # Thin wrapper: all the work lives in repair.rebuildfncache().
    repair.rebuildfncache(ui, repo)
2638
2638
2639
2639
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, opts)
    for abspath in ctx.walk(matcher):
        fctx = ctx[abspath]
        # renamed() yields (old path, old filenode) or a falsy value.
        renamed = fctx.filelog().renamed(fctx.filenode())
        relpath = repo.pathto(abspath)
        if renamed:
            ui.write(
                _(b"%s renamed from %s:%s\n")
                % (relpath, renamed[0], hex(renamed[1]))
            )
        else:
            ui.write(_(b"%s not renamed\n") % relpath)
2659
2659
2660
2660
2661 @command(
2661 @command(
2662 b'debugrevlog',
2662 b'debugrevlog',
2663 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
2663 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
2664 _(b'-c|-m|FILE'),
2664 _(b'-c|-m|FILE'),
2665 optionalrepo=True,
2665 optionalrepo=True,
2666 )
2666 )
2667 def debugrevlog(ui, repo, file_=None, **opts):
2667 def debugrevlog(ui, repo, file_=None, **opts):
2668 """show data and statistics about a revlog"""
2668 """show data and statistics about a revlog"""
2669 opts = pycompat.byteskwargs(opts)
2669 opts = pycompat.byteskwargs(opts)
2670 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
2670 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
2671
2671
2672 if opts.get(b"dump"):
2672 if opts.get(b"dump"):
2673 numrevs = len(r)
2673 numrevs = len(r)
2674 ui.write(
2674 ui.write(
2675 (
2675 (
2676 b"# rev p1rev p2rev start end deltastart base p1 p2"
2676 b"# rev p1rev p2rev start end deltastart base p1 p2"
2677 b" rawsize totalsize compression heads chainlen\n"
2677 b" rawsize totalsize compression heads chainlen\n"
2678 )
2678 )
2679 )
2679 )
2680 ts = 0
2680 ts = 0
2681 heads = set()
2681 heads = set()
2682
2682
2683 for rev in pycompat.xrange(numrevs):
2683 for rev in pycompat.xrange(numrevs):
2684 dbase = r.deltaparent(rev)
2684 dbase = r.deltaparent(rev)
2685 if dbase == -1:
2685 if dbase == -1:
2686 dbase = rev
2686 dbase = rev
2687 cbase = r.chainbase(rev)
2687 cbase = r.chainbase(rev)
2688 clen = r.chainlen(rev)
2688 clen = r.chainlen(rev)
2689 p1, p2 = r.parentrevs(rev)
2689 p1, p2 = r.parentrevs(rev)
2690 rs = r.rawsize(rev)
2690 rs = r.rawsize(rev)
2691 ts = ts + rs
2691 ts = ts + rs
2692 heads -= set(r.parentrevs(rev))
2692 heads -= set(r.parentrevs(rev))
2693 heads.add(rev)
2693 heads.add(rev)
2694 try:
2694 try:
2695 compression = ts / r.end(rev)
2695 compression = ts / r.end(rev)
2696 except ZeroDivisionError:
2696 except ZeroDivisionError:
2697 compression = 0
2697 compression = 0
2698 ui.write(
2698 ui.write(
2699 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2699 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2700 b"%11d %5d %8d\n"
2700 b"%11d %5d %8d\n"
2701 % (
2701 % (
2702 rev,
2702 rev,
2703 p1,
2703 p1,
2704 p2,
2704 p2,
2705 r.start(rev),
2705 r.start(rev),
2706 r.end(rev),
2706 r.end(rev),
2707 r.start(dbase),
2707 r.start(dbase),
2708 r.start(cbase),
2708 r.start(cbase),
2709 r.start(p1),
2709 r.start(p1),
2710 r.start(p2),
2710 r.start(p2),
2711 rs,
2711 rs,
2712 ts,
2712 ts,
2713 compression,
2713 compression,
2714 len(heads),
2714 len(heads),
2715 clen,
2715 clen,
2716 )
2716 )
2717 )
2717 )
2718 return 0
2718 return 0
2719
2719
2720 v = r.version
2720 v = r.version
2721 format = v & 0xFFFF
2721 format = v & 0xFFFF
2722 flags = []
2722 flags = []
2723 gdelta = False
2723 gdelta = False
2724 if v & revlog.FLAG_INLINE_DATA:
2724 if v & revlog.FLAG_INLINE_DATA:
2725 flags.append(b'inline')
2725 flags.append(b'inline')
2726 if v & revlog.FLAG_GENERALDELTA:
2726 if v & revlog.FLAG_GENERALDELTA:
2727 gdelta = True
2727 gdelta = True
2728 flags.append(b'generaldelta')
2728 flags.append(b'generaldelta')
2729 if not flags:
2729 if not flags:
2730 flags = [b'(none)']
2730 flags = [b'(none)']
2731
2731
2732 ### tracks merge vs single parent
2732 ### tracks merge vs single parent
2733 nummerges = 0
2733 nummerges = 0
2734
2734
2735 ### tracks ways the "delta" are build
2735 ### tracks ways the "delta" are build
2736 # nodelta
2736 # nodelta
2737 numempty = 0
2737 numempty = 0
2738 numemptytext = 0
2738 numemptytext = 0
2739 numemptydelta = 0
2739 numemptydelta = 0
2740 # full file content
2740 # full file content
2741 numfull = 0
2741 numfull = 0
2742 # intermediate snapshot against a prior snapshot
2742 # intermediate snapshot against a prior snapshot
2743 numsemi = 0
2743 numsemi = 0
2744 # snapshot count per depth
2744 # snapshot count per depth
2745 numsnapdepth = collections.defaultdict(lambda: 0)
2745 numsnapdepth = collections.defaultdict(lambda: 0)
2746 # delta against previous revision
2746 # delta against previous revision
2747 numprev = 0
2747 numprev = 0
2748 # delta against first or second parent (not prev)
2748 # delta against first or second parent (not prev)
2749 nump1 = 0
2749 nump1 = 0
2750 nump2 = 0
2750 nump2 = 0
2751 # delta against neither prev nor parents
2751 # delta against neither prev nor parents
2752 numother = 0
2752 numother = 0
2753 # delta against prev that are also first or second parent
2753 # delta against prev that are also first or second parent
2754 # (details of `numprev`)
2754 # (details of `numprev`)
2755 nump1prev = 0
2755 nump1prev = 0
2756 nump2prev = 0
2756 nump2prev = 0
2757
2757
2758 # data about delta chain of each revs
2758 # data about delta chain of each revs
2759 chainlengths = []
2759 chainlengths = []
2760 chainbases = []
2760 chainbases = []
2761 chainspans = []
2761 chainspans = []
2762
2762
2763 # data about each revision
2763 # data about each revision
2764 datasize = [None, 0, 0]
2764 datasize = [None, 0, 0]
2765 fullsize = [None, 0, 0]
2765 fullsize = [None, 0, 0]
2766 semisize = [None, 0, 0]
2766 semisize = [None, 0, 0]
2767 # snapshot count per depth
2767 # snapshot count per depth
2768 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2768 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2769 deltasize = [None, 0, 0]
2769 deltasize = [None, 0, 0]
2770 chunktypecounts = {}
2770 chunktypecounts = {}
2771 chunktypesizes = {}
2771 chunktypesizes = {}
2772
2772
2773 def addsize(size, l):
2773 def addsize(size, l):
2774 if l[0] is None or size < l[0]:
2774 if l[0] is None or size < l[0]:
2775 l[0] = size
2775 l[0] = size
2776 if size > l[1]:
2776 if size > l[1]:
2777 l[1] = size
2777 l[1] = size
2778 l[2] += size
2778 l[2] += size
2779
2779
2780 numrevs = len(r)
2780 numrevs = len(r)
2781 for rev in pycompat.xrange(numrevs):
2781 for rev in pycompat.xrange(numrevs):
2782 p1, p2 = r.parentrevs(rev)
2782 p1, p2 = r.parentrevs(rev)
2783 delta = r.deltaparent(rev)
2783 delta = r.deltaparent(rev)
2784 if format > 0:
2784 if format > 0:
2785 addsize(r.rawsize(rev), datasize)
2785 addsize(r.rawsize(rev), datasize)
2786 if p2 != nullrev:
2786 if p2 != nullrev:
2787 nummerges += 1
2787 nummerges += 1
2788 size = r.length(rev)
2788 size = r.length(rev)
2789 if delta == nullrev:
2789 if delta == nullrev:
2790 chainlengths.append(0)
2790 chainlengths.append(0)
2791 chainbases.append(r.start(rev))
2791 chainbases.append(r.start(rev))
2792 chainspans.append(size)
2792 chainspans.append(size)
2793 if size == 0:
2793 if size == 0:
2794 numempty += 1
2794 numempty += 1
2795 numemptytext += 1
2795 numemptytext += 1
2796 else:
2796 else:
2797 numfull += 1
2797 numfull += 1
2798 numsnapdepth[0] += 1
2798 numsnapdepth[0] += 1
2799 addsize(size, fullsize)
2799 addsize(size, fullsize)
2800 addsize(size, snapsizedepth[0])
2800 addsize(size, snapsizedepth[0])
2801 else:
2801 else:
2802 chainlengths.append(chainlengths[delta] + 1)
2802 chainlengths.append(chainlengths[delta] + 1)
2803 baseaddr = chainbases[delta]
2803 baseaddr = chainbases[delta]
2804 revaddr = r.start(rev)
2804 revaddr = r.start(rev)
2805 chainbases.append(baseaddr)
2805 chainbases.append(baseaddr)
2806 chainspans.append((revaddr - baseaddr) + size)
2806 chainspans.append((revaddr - baseaddr) + size)
2807 if size == 0:
2807 if size == 0:
2808 numempty += 1
2808 numempty += 1
2809 numemptydelta += 1
2809 numemptydelta += 1
2810 elif r.issnapshot(rev):
2810 elif r.issnapshot(rev):
2811 addsize(size, semisize)
2811 addsize(size, semisize)
2812 numsemi += 1
2812 numsemi += 1
2813 depth = r.snapshotdepth(rev)
2813 depth = r.snapshotdepth(rev)
2814 numsnapdepth[depth] += 1
2814 numsnapdepth[depth] += 1
2815 addsize(size, snapsizedepth[depth])
2815 addsize(size, snapsizedepth[depth])
2816 else:
2816 else:
2817 addsize(size, deltasize)
2817 addsize(size, deltasize)
2818 if delta == rev - 1:
2818 if delta == rev - 1:
2819 numprev += 1
2819 numprev += 1
2820 if delta == p1:
2820 if delta == p1:
2821 nump1prev += 1
2821 nump1prev += 1
2822 elif delta == p2:
2822 elif delta == p2:
2823 nump2prev += 1
2823 nump2prev += 1
2824 elif delta == p1:
2824 elif delta == p1:
2825 nump1 += 1
2825 nump1 += 1
2826 elif delta == p2:
2826 elif delta == p2:
2827 nump2 += 1
2827 nump2 += 1
2828 elif delta != nullrev:
2828 elif delta != nullrev:
2829 numother += 1
2829 numother += 1
2830
2830
2831 # Obtain data on the raw chunks in the revlog.
2831 # Obtain data on the raw chunks in the revlog.
2832 if util.safehasattr(r, b'_getsegmentforrevs'):
2832 if util.safehasattr(r, b'_getsegmentforrevs'):
2833 segment = r._getsegmentforrevs(rev, rev)[1]
2833 segment = r._getsegmentforrevs(rev, rev)[1]
2834 else:
2834 else:
2835 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2835 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2836 if segment:
2836 if segment:
2837 chunktype = bytes(segment[0:1])
2837 chunktype = bytes(segment[0:1])
2838 else:
2838 else:
2839 chunktype = b'empty'
2839 chunktype = b'empty'
2840
2840
2841 if chunktype not in chunktypecounts:
2841 if chunktype not in chunktypecounts:
2842 chunktypecounts[chunktype] = 0
2842 chunktypecounts[chunktype] = 0
2843 chunktypesizes[chunktype] = 0
2843 chunktypesizes[chunktype] = 0
2844
2844
2845 chunktypecounts[chunktype] += 1
2845 chunktypecounts[chunktype] += 1
2846 chunktypesizes[chunktype] += size
2846 chunktypesizes[chunktype] += size
2847
2847
2848 # Adjust size min value for empty cases
2848 # Adjust size min value for empty cases
2849 for size in (datasize, fullsize, semisize, deltasize):
2849 for size in (datasize, fullsize, semisize, deltasize):
2850 if size[0] is None:
2850 if size[0] is None:
2851 size[0] = 0
2851 size[0] = 0
2852
2852
2853 numdeltas = numrevs - numfull - numempty - numsemi
2853 numdeltas = numrevs - numfull - numempty - numsemi
2854 numoprev = numprev - nump1prev - nump2prev
2854 numoprev = numprev - nump1prev - nump2prev
2855 totalrawsize = datasize[2]
2855 totalrawsize = datasize[2]
2856 datasize[2] /= numrevs
2856 datasize[2] /= numrevs
2857 fulltotal = fullsize[2]
2857 fulltotal = fullsize[2]
2858 if numfull == 0:
2858 if numfull == 0:
2859 fullsize[2] = 0
2859 fullsize[2] = 0
2860 else:
2860 else:
2861 fullsize[2] /= numfull
2861 fullsize[2] /= numfull
2862 semitotal = semisize[2]
2862 semitotal = semisize[2]
2863 snaptotal = {}
2863 snaptotal = {}
2864 if numsemi > 0:
2864 if numsemi > 0:
2865 semisize[2] /= numsemi
2865 semisize[2] /= numsemi
2866 for depth in snapsizedepth:
2866 for depth in snapsizedepth:
2867 snaptotal[depth] = snapsizedepth[depth][2]
2867 snaptotal[depth] = snapsizedepth[depth][2]
2868 snapsizedepth[depth][2] /= numsnapdepth[depth]
2868 snapsizedepth[depth][2] /= numsnapdepth[depth]
2869
2869
2870 deltatotal = deltasize[2]
2870 deltatotal = deltasize[2]
2871 if numdeltas > 0:
2871 if numdeltas > 0:
2872 deltasize[2] /= numdeltas
2872 deltasize[2] /= numdeltas
2873 totalsize = fulltotal + semitotal + deltatotal
2873 totalsize = fulltotal + semitotal + deltatotal
2874 avgchainlen = sum(chainlengths) / numrevs
2874 avgchainlen = sum(chainlengths) / numrevs
2875 maxchainlen = max(chainlengths)
2875 maxchainlen = max(chainlengths)
2876 maxchainspan = max(chainspans)
2876 maxchainspan = max(chainspans)
2877 compratio = 1
2877 compratio = 1
2878 if totalsize:
2878 if totalsize:
2879 compratio = totalrawsize / totalsize
2879 compratio = totalrawsize / totalsize
2880
2880
2881 basedfmtstr = b'%%%dd\n'
2881 basedfmtstr = b'%%%dd\n'
2882 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
2882 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
2883
2883
2884 def dfmtstr(max):
2884 def dfmtstr(max):
2885 return basedfmtstr % len(str(max))
2885 return basedfmtstr % len(str(max))
2886
2886
2887 def pcfmtstr(max, padding=0):
2887 def pcfmtstr(max, padding=0):
2888 return basepcfmtstr % (len(str(max)), b' ' * padding)
2888 return basepcfmtstr % (len(str(max)), b' ' * padding)
2889
2889
2890 def pcfmt(value, total):
2890 def pcfmt(value, total):
2891 if total:
2891 if total:
2892 return (value, 100 * float(value) / total)
2892 return (value, 100 * float(value) / total)
2893 else:
2893 else:
2894 return value, 100.0
2894 return value, 100.0
2895
2895
2896 ui.writenoi18n(b'format : %d\n' % format)
2896 ui.writenoi18n(b'format : %d\n' % format)
2897 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
2897 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
2898
2898
2899 ui.write(b'\n')
2899 ui.write(b'\n')
2900 fmt = pcfmtstr(totalsize)
2900 fmt = pcfmtstr(totalsize)
2901 fmt2 = dfmtstr(totalsize)
2901 fmt2 = dfmtstr(totalsize)
2902 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2902 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2903 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
2903 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
2904 ui.writenoi18n(
2904 ui.writenoi18n(
2905 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
2905 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
2906 )
2906 )
2907 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2907 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2908 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
2908 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
2909 ui.writenoi18n(
2909 ui.writenoi18n(
2910 b' text : '
2910 b' text : '
2911 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
2911 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
2912 )
2912 )
2913 ui.writenoi18n(
2913 ui.writenoi18n(
2914 b' delta : '
2914 b' delta : '
2915 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
2915 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
2916 )
2916 )
2917 ui.writenoi18n(
2917 ui.writenoi18n(
2918 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
2918 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
2919 )
2919 )
2920 for depth in sorted(numsnapdepth):
2920 for depth in sorted(numsnapdepth):
2921 ui.write(
2921 ui.write(
2922 (b' lvl-%-3d : ' % depth)
2922 (b' lvl-%-3d : ' % depth)
2923 + fmt % pcfmt(numsnapdepth[depth], numrevs)
2923 + fmt % pcfmt(numsnapdepth[depth], numrevs)
2924 )
2924 )
2925 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
2925 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
2926 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
2926 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
2927 ui.writenoi18n(
2927 ui.writenoi18n(
2928 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
2928 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
2929 )
2929 )
2930 for depth in sorted(numsnapdepth):
2930 for depth in sorted(numsnapdepth):
2931 ui.write(
2931 ui.write(
2932 (b' lvl-%-3d : ' % depth)
2932 (b' lvl-%-3d : ' % depth)
2933 + fmt % pcfmt(snaptotal[depth], totalsize)
2933 + fmt % pcfmt(snaptotal[depth], totalsize)
2934 )
2934 )
2935 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
2935 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
2936
2936
2937 def fmtchunktype(chunktype):
2937 def fmtchunktype(chunktype):
2938 if chunktype == b'empty':
2938 if chunktype == b'empty':
2939 return b' %s : ' % chunktype
2939 return b' %s : ' % chunktype
2940 elif chunktype in pycompat.bytestr(string.ascii_letters):
2940 elif chunktype in pycompat.bytestr(string.ascii_letters):
2941 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2941 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2942 else:
2942 else:
2943 return b' 0x%s : ' % hex(chunktype)
2943 return b' 0x%s : ' % hex(chunktype)
2944
2944
2945 ui.write(b'\n')
2945 ui.write(b'\n')
2946 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
2946 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
2947 for chunktype in sorted(chunktypecounts):
2947 for chunktype in sorted(chunktypecounts):
2948 ui.write(fmtchunktype(chunktype))
2948 ui.write(fmtchunktype(chunktype))
2949 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2949 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2950 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
2950 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
2951 for chunktype in sorted(chunktypecounts):
2951 for chunktype in sorted(chunktypecounts):
2952 ui.write(fmtchunktype(chunktype))
2952 ui.write(fmtchunktype(chunktype))
2953 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2953 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2954
2954
2955 ui.write(b'\n')
2955 ui.write(b'\n')
2956 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2956 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2957 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
2957 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
2958 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
2958 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
2959 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
2959 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
2960 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
2960 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
2961
2961
2962 if format > 0:
2962 if format > 0:
2963 ui.write(b'\n')
2963 ui.write(b'\n')
2964 ui.writenoi18n(
2964 ui.writenoi18n(
2965 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
2965 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
2966 % tuple(datasize)
2966 % tuple(datasize)
2967 )
2967 )
2968 ui.writenoi18n(
2968 ui.writenoi18n(
2969 b'full revision size (min/max/avg) : %d / %d / %d\n'
2969 b'full revision size (min/max/avg) : %d / %d / %d\n'
2970 % tuple(fullsize)
2970 % tuple(fullsize)
2971 )
2971 )
2972 ui.writenoi18n(
2972 ui.writenoi18n(
2973 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
2973 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
2974 % tuple(semisize)
2974 % tuple(semisize)
2975 )
2975 )
2976 for depth in sorted(snapsizedepth):
2976 for depth in sorted(snapsizedepth):
2977 if depth == 0:
2977 if depth == 0:
2978 continue
2978 continue
2979 ui.writenoi18n(
2979 ui.writenoi18n(
2980 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
2980 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
2981 % ((depth,) + tuple(snapsizedepth[depth]))
2981 % ((depth,) + tuple(snapsizedepth[depth]))
2982 )
2982 )
2983 ui.writenoi18n(
2983 ui.writenoi18n(
2984 b'delta size (min/max/avg) : %d / %d / %d\n'
2984 b'delta size (min/max/avg) : %d / %d / %d\n'
2985 % tuple(deltasize)
2985 % tuple(deltasize)
2986 )
2986 )
2987
2987
2988 if numdeltas > 0:
2988 if numdeltas > 0:
2989 ui.write(b'\n')
2989 ui.write(b'\n')
2990 fmt = pcfmtstr(numdeltas)
2990 fmt = pcfmtstr(numdeltas)
2991 fmt2 = pcfmtstr(numdeltas, 4)
2991 fmt2 = pcfmtstr(numdeltas, 4)
2992 ui.writenoi18n(
2992 ui.writenoi18n(
2993 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
2993 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
2994 )
2994 )
2995 if numprev > 0:
2995 if numprev > 0:
2996 ui.writenoi18n(
2996 ui.writenoi18n(
2997 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
2997 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
2998 )
2998 )
2999 ui.writenoi18n(
2999 ui.writenoi18n(
3000 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3000 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3001 )
3001 )
3002 ui.writenoi18n(
3002 ui.writenoi18n(
3003 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3003 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3004 )
3004 )
3005 if gdelta:
3005 if gdelta:
3006 ui.writenoi18n(
3006 ui.writenoi18n(
3007 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3007 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3008 )
3008 )
3009 ui.writenoi18n(
3009 ui.writenoi18n(
3010 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3010 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3011 )
3011 )
3012 ui.writenoi18n(
3012 ui.writenoi18n(
3013 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3013 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3014 )
3014 )
3015
3015
3016
3016
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    # two supported layouts: 0 (legacy, parents as nodeids) and
    # 1 (adds flags/size, parents as revs)
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # full hex nodeids with --debug, short forms otherwise
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    # header line, matching the column layout written in the loop below
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b" rev offset length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b" rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b" rev flag offset length size link p1"
                    b" p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b" rev flag size link p1 p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # damaged index entries still get a row, with null parents
                pp = [nullid, nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3130
3130
3131
3131
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # the revset pipeline, in order; each stage transforms the parse tree
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
        if opts[b'optimize']:
            showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # evaluate both the analyzed and the optimized trees and diff them
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # print a unified-style diff of the two revision sequences
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3263
3263
3264
3264
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    logfh = None

    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    if opts[b'logiofd']:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        try:
            logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 0)

    # hand stdio over to the SSH wire-protocol server; blocks until done
    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()
3313
3313
3314
3314
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """

    node1 = scmutil.revsingle(repo, rev1).node()
    # a missing second revision means the null revision (single parent)
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    # take the working-copy lock while rewriting the dirstate parents
    with repo.wlock():
        repo.setparents(node1, node2)
3332
3332
3333
3333
3334 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3334 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3335 def debugsidedata(ui, repo, file_, rev=None, **opts):
3335 def debugsidedata(ui, repo, file_, rev=None, **opts):
3336 """dump the side data for a cl/manifest/file revision
3336 """dump the side data for a cl/manifest/file revision
3337
3337
3338 Use --verbose to dump the sidedata content."""
3338 Use --verbose to dump the sidedata content."""
3339 opts = pycompat.byteskwargs(opts)
3339 opts = pycompat.byteskwargs(opts)
3340 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3340 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3341 if rev is not None:
3341 if rev is not None:
3342 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3342 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3343 file_, rev = None, file_
3343 file_, rev = None, file_
3344 elif rev is None:
3344 elif rev is None:
3345 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3345 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3346 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
3346 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
3347 r = getattr(r, '_revlog', r)
3347 r = getattr(r, '_revlog', r)
3348 try:
3348 try:
3349 sidedata = r.sidedata(r.lookup(rev))
3349 sidedata = r.sidedata(r.lookup(rev))
3350 except KeyError:
3350 except KeyError:
3351 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3351 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3352 if sidedata:
3352 if sidedata:
3353 sidedata = list(sidedata.items())
3353 sidedata = list(sidedata.items())
3354 sidedata.sort()
3354 sidedata.sort()
3355 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3355 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3356 for key, value in sidedata:
3356 for key, value in sidedata:
3357 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3357 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3358 if ui.verbose:
3358 if ui.verbose:
3359 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3359 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3360
3360
3361
3361
3362 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3362 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3363 def debugssl(ui, repo, source=None, **opts):
3363 def debugssl(ui, repo, source=None, **opts):
3364 '''test a secure connection to a server
3364 '''test a secure connection to a server
3365
3365
3366 This builds the certificate chain for the server on Windows, installing the
3366 This builds the certificate chain for the server on Windows, installing the
3367 missing intermediates and trusted root via Windows Update if necessary. It
3367 missing intermediates and trusted root via Windows Update if necessary. It
3368 does nothing on other platforms.
3368 does nothing on other platforms.
3369
3369
3370 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3370 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3371 that server is used. See :hg:`help urls` for more information.
3371 that server is used. See :hg:`help urls` for more information.
3372
3372
3373 If the update succeeds, retry the original operation. Otherwise, the cause
3373 If the update succeeds, retry the original operation. Otherwise, the cause
3374 of the SSL error is likely another issue.
3374 of the SSL error is likely another issue.
3375 '''
3375 '''
3376 if not pycompat.iswindows:
3376 if not pycompat.iswindows:
3377 raise error.Abort(
3377 raise error.Abort(
3378 _(b'certificate chain building is only possible on Windows')
3378 _(b'certificate chain building is only possible on Windows')
3379 )
3379 )
3380
3380
3381 if not source:
3381 if not source:
3382 if not repo:
3382 if not repo:
3383 raise error.Abort(
3383 raise error.Abort(
3384 _(
3384 _(
3385 b"there is no Mercurial repository here, and no "
3385 b"there is no Mercurial repository here, and no "
3386 b"server specified"
3386 b"server specified"
3387 )
3387 )
3388 )
3388 )
3389 source = b"default"
3389 source = b"default"
3390
3390
3391 source, branches = hg.parseurl(ui.expandpath(source))
3391 source, branches = hg.parseurl(ui.expandpath(source))
3392 url = util.url(source)
3392 url = util.url(source)
3393
3393
3394 defaultport = {b'https': 443, b'ssh': 22}
3394 defaultport = {b'https': 443, b'ssh': 22}
3395 if url.scheme in defaultport:
3395 if url.scheme in defaultport:
3396 try:
3396 try:
3397 addr = (url.host, int(url.port or defaultport[url.scheme]))
3397 addr = (url.host, int(url.port or defaultport[url.scheme]))
3398 except ValueError:
3398 except ValueError:
3399 raise error.Abort(_(b"malformed port number in URL"))
3399 raise error.Abort(_(b"malformed port number in URL"))
3400 else:
3400 else:
3401 raise error.Abort(_(b"only https and ssh connections are supported"))
3401 raise error.Abort(_(b"only https and ssh connections are supported"))
3402
3402
3403 from . import win32
3403 from . import win32
3404
3404
3405 s = ssl.wrap_socket(
3405 s = ssl.wrap_socket(
3406 socket.socket(),
3406 socket.socket(),
3407 ssl_version=ssl.PROTOCOL_TLS,
3407 ssl_version=ssl.PROTOCOL_TLS,
3408 cert_reqs=ssl.CERT_NONE,
3408 cert_reqs=ssl.CERT_NONE,
3409 ca_certs=None,
3409 ca_certs=None,
3410 )
3410 )
3411
3411
3412 try:
3412 try:
3413 s.connect(addr)
3413 s.connect(addr)
3414 cert = s.getpeercert(True)
3414 cert = s.getpeercert(True)
3415
3415
3416 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3416 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3417
3417
3418 complete = win32.checkcertificatechain(cert, build=False)
3418 complete = win32.checkcertificatechain(cert, build=False)
3419
3419
3420 if not complete:
3420 if not complete:
3421 ui.status(_(b'certificate chain is incomplete, updating... '))
3421 ui.status(_(b'certificate chain is incomplete, updating... '))
3422
3422
3423 if not win32.checkcertificatechain(cert):
3423 if not win32.checkcertificatechain(cert):
3424 ui.status(_(b'failed.\n'))
3424 ui.status(_(b'failed.\n'))
3425 else:
3425 else:
3426 ui.status(_(b'done.\n'))
3426 ui.status(_(b'done.\n'))
3427 else:
3427 else:
3428 ui.status(_(b'full certificate chain is available\n'))
3428 ui.status(_(b'full certificate chain is available\n'))
3429 finally:
3429 finally:
3430 s.close()
3430 s.close()
3431
3431
3432
3432
3433 @command(
3433 @command(
3434 b"debugbackupbundle",
3434 b"debugbackupbundle",
3435 [
3435 [
3436 (
3436 (
3437 b"",
3437 b"",
3438 b"recover",
3438 b"recover",
3439 b"",
3439 b"",
3440 b"brings the specified changeset back into the repository",
3440 b"brings the specified changeset back into the repository",
3441 )
3441 )
3442 ]
3442 ]
3443 + cmdutil.logopts,
3443 + cmdutil.logopts,
3444 _(b"hg debugbackupbundle [--recover HASH]"),
3444 _(b"hg debugbackupbundle [--recover HASH]"),
3445 )
3445 )
3446 def debugbackupbundle(ui, repo, *pats, **opts):
3446 def debugbackupbundle(ui, repo, *pats, **opts):
3447 """lists the changesets available in backup bundles
3447 """lists the changesets available in backup bundles
3448
3448
3449 Without any arguments, this command prints a list of the changesets in each
3449 Without any arguments, this command prints a list of the changesets in each
3450 backup bundle.
3450 backup bundle.
3451
3451
3452 --recover takes a changeset hash and unbundles the first bundle that
3452 --recover takes a changeset hash and unbundles the first bundle that
3453 contains that hash, which puts that changeset back in your repository.
3453 contains that hash, which puts that changeset back in your repository.
3454
3454
3455 --verbose will print the entire commit message and the bundle path for that
3455 --verbose will print the entire commit message and the bundle path for that
3456 backup.
3456 backup.
3457 """
3457 """
3458 backups = list(
3458 backups = list(
3459 filter(
3459 filter(
3460 os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
3460 os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
3461 )
3461 )
3462 )
3462 )
3463 backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)
3463 backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)
3464
3464
3465 opts = pycompat.byteskwargs(opts)
3465 opts = pycompat.byteskwargs(opts)
3466 opts[b"bundle"] = b""
3466 opts[b"bundle"] = b""
3467 opts[b"force"] = None
3467 opts[b"force"] = None
3468 limit = logcmdutil.getlimit(opts)
3468 limit = logcmdutil.getlimit(opts)
3469
3469
3470 def display(other, chlist, displayer):
3470 def display(other, chlist, displayer):
3471 if opts.get(b"newest_first"):
3471 if opts.get(b"newest_first"):
3472 chlist.reverse()
3472 chlist.reverse()
3473 count = 0
3473 count = 0
3474 for n in chlist:
3474 for n in chlist:
3475 if limit is not None and count >= limit:
3475 if limit is not None and count >= limit:
3476 break
3476 break
3477 parents = [True for p in other.changelog.parents(n) if p != nullid]
3477 parents = [True for p in other.changelog.parents(n) if p != nullid]
3478 if opts.get(b"no_merges") and len(parents) == 2:
3478 if opts.get(b"no_merges") and len(parents) == 2:
3479 continue
3479 continue
3480 count += 1
3480 count += 1
3481 displayer.show(other[n])
3481 displayer.show(other[n])
3482
3482
3483 recovernode = opts.get(b"recover")
3483 recovernode = opts.get(b"recover")
3484 if recovernode:
3484 if recovernode:
3485 if scmutil.isrevsymbol(repo, recovernode):
3485 if scmutil.isrevsymbol(repo, recovernode):
3486 ui.warn(_(b"%s already exists in the repo\n") % recovernode)
3486 ui.warn(_(b"%s already exists in the repo\n") % recovernode)
3487 return
3487 return
3488 elif backups:
3488 elif backups:
3489 msg = _(
3489 msg = _(
3490 b"Recover changesets using: hg debugbackupbundle --recover "
3490 b"Recover changesets using: hg debugbackupbundle --recover "
3491 b"<changeset hash>\n\nAvailable backup changesets:"
3491 b"<changeset hash>\n\nAvailable backup changesets:"
3492 )
3492 )
3493 ui.status(msg, label=b"status.removed")
3493 ui.status(msg, label=b"status.removed")
3494 else:
3494 else:
3495 ui.status(_(b"no backup changesets found\n"))
3495 ui.status(_(b"no backup changesets found\n"))
3496 return
3496 return
3497
3497
3498 for backup in backups:
3498 for backup in backups:
3499 # Much of this is copied from the hg incoming logic
3499 # Much of this is copied from the hg incoming logic
3500 source = ui.expandpath(os.path.relpath(backup, encoding.getcwd()))
3500 source = ui.expandpath(os.path.relpath(backup, encoding.getcwd()))
3501 source, branches = hg.parseurl(source, opts.get(b"branch"))
3501 source, branches = hg.parseurl(source, opts.get(b"branch"))
3502 try:
3502 try:
3503 other = hg.peer(repo, opts, source)
3503 other = hg.peer(repo, opts, source)
3504 except error.LookupError as ex:
3504 except error.LookupError as ex:
3505 msg = _(b"\nwarning: unable to open bundle %s") % source
3505 msg = _(b"\nwarning: unable to open bundle %s") % source
3506 hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
3506 hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
3507 ui.warn(msg, hint=hint)
3507 ui.warn(msg, hint=hint)
3508 continue
3508 continue
3509 revs, checkout = hg.addbranchrevs(
3509 revs, checkout = hg.addbranchrevs(
3510 repo, other, branches, opts.get(b"rev")
3510 repo, other, branches, opts.get(b"rev")
3511 )
3511 )
3512
3512
3513 if revs:
3513 if revs:
3514 revs = [other.lookup(rev) for rev in revs]
3514 revs = [other.lookup(rev) for rev in revs]
3515
3515
3516 quiet = ui.quiet
3516 quiet = ui.quiet
3517 try:
3517 try:
3518 ui.quiet = True
3518 ui.quiet = True
3519 other, chlist, cleanupfn = bundlerepo.getremotechanges(
3519 other, chlist, cleanupfn = bundlerepo.getremotechanges(
3520 ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
3520 ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
3521 )
3521 )
3522 except error.LookupError:
3522 except error.LookupError:
3523 continue
3523 continue
3524 finally:
3524 finally:
3525 ui.quiet = quiet
3525 ui.quiet = quiet
3526
3526
3527 try:
3527 try:
3528 if not chlist:
3528 if not chlist:
3529 continue
3529 continue
3530 if recovernode:
3530 if recovernode:
3531 with repo.lock(), repo.transaction(b"unbundle") as tr:
3531 with repo.lock(), repo.transaction(b"unbundle") as tr:
3532 if scmutil.isrevsymbol(other, recovernode):
3532 if scmutil.isrevsymbol(other, recovernode):
3533 ui.status(_(b"Unbundling %s\n") % (recovernode))
3533 ui.status(_(b"Unbundling %s\n") % (recovernode))
3534 f = hg.openpath(ui, source)
3534 f = hg.openpath(ui, source)
3535 gen = exchange.readbundle(ui, f, source)
3535 gen = exchange.readbundle(ui, f, source)
3536 if isinstance(gen, bundle2.unbundle20):
3536 if isinstance(gen, bundle2.unbundle20):
3537 bundle2.applybundle(
3537 bundle2.applybundle(
3538 repo,
3538 repo,
3539 gen,
3539 gen,
3540 tr,
3540 tr,
3541 source=b"unbundle",
3541 source=b"unbundle",
3542 url=b"bundle:" + source,
3542 url=b"bundle:" + source,
3543 )
3543 )
3544 else:
3544 else:
3545 gen.apply(repo, b"unbundle", b"bundle:" + source)
3545 gen.apply(repo, b"unbundle", b"bundle:" + source)
3546 break
3546 break
3547 else:
3547 else:
3548 backupdate = encoding.strtolocal(
3548 backupdate = encoding.strtolocal(
3549 time.strftime(
3549 time.strftime(
3550 "%a %H:%M, %Y-%m-%d",
3550 "%a %H:%M, %Y-%m-%d",
3551 time.localtime(os.path.getmtime(source)),
3551 time.localtime(os.path.getmtime(source)),
3552 )
3552 )
3553 )
3553 )
3554 ui.status(b"\n%s\n" % (backupdate.ljust(50)))
3554 ui.status(b"\n%s\n" % (backupdate.ljust(50)))
3555 if ui.verbose:
3555 if ui.verbose:
3556 ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
3556 ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
3557 else:
3557 else:
3558 opts[
3558 opts[
3559 b"template"
3559 b"template"
3560 ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
3560 ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
3561 displayer = logcmdutil.changesetdisplayer(
3561 displayer = logcmdutil.changesetdisplayer(
3562 ui, other, opts, False
3562 ui, other, opts, False
3563 )
3563 )
3564 display(other, chlist, displayer)
3564 display(other, chlist, displayer)
3565 displayer.close()
3565 displayer.close()
3566 finally:
3566 finally:
3567 cleanupfn()
3567 cleanupfn()
3568
3568
3569
3569
3570 @command(
3570 @command(
3571 b'debugsub',
3571 b'debugsub',
3572 [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
3572 [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
3573 _(b'[-r REV] [REV]'),
3573 _(b'[-r REV] [REV]'),
3574 )
3574 )
3575 def debugsub(ui, repo, rev=None):
3575 def debugsub(ui, repo, rev=None):
3576 ctx = scmutil.revsingle(repo, rev, None)
3576 ctx = scmutil.revsingle(repo, rev, None)
3577 for k, v in sorted(ctx.substate.items()):
3577 for k, v in sorted(ctx.substate.items()):
3578 ui.writenoi18n(b'path %s\n' % k)
3578 ui.writenoi18n(b'path %s\n' % k)
3579 ui.writenoi18n(b' source %s\n' % v[0])
3579 ui.writenoi18n(b' source %s\n' % v[0])
3580 ui.writenoi18n(b' revision %s\n' % v[1])
3580 ui.writenoi18n(b' revision %s\n' % v[1])
3581
3581
3582
3582
3583 @command(
3583 @command(
3584 b'debugsuccessorssets',
3584 b'debugsuccessorssets',
3585 [(b'', b'closest', False, _(b'return closest successors sets only'))],
3585 [(b'', b'closest', False, _(b'return closest successors sets only'))],
3586 _(b'[REV]'),
3586 _(b'[REV]'),
3587 )
3587 )
3588 def debugsuccessorssets(ui, repo, *revs, **opts):
3588 def debugsuccessorssets(ui, repo, *revs, **opts):
3589 """show set of successors for revision
3589 """show set of successors for revision
3590
3590
3591 A successors set of changeset A is a consistent group of revisions that
3591 A successors set of changeset A is a consistent group of revisions that
3592 succeed A. It contains non-obsolete changesets only unless closests
3592 succeed A. It contains non-obsolete changesets only unless closests
3593 successors set is set.
3593 successors set is set.
3594
3594
3595 In most cases a changeset A has a single successors set containing a single
3595 In most cases a changeset A has a single successors set containing a single
3596 successor (changeset A replaced by A').
3596 successor (changeset A replaced by A').
3597
3597
3598 A changeset that is made obsolete with no successors are called "pruned".
3598 A changeset that is made obsolete with no successors are called "pruned".
3599 Such changesets have no successors sets at all.
3599 Such changesets have no successors sets at all.
3600
3600
3601 A changeset that has been "split" will have a successors set containing
3601 A changeset that has been "split" will have a successors set containing
3602 more than one successor.
3602 more than one successor.
3603
3603
3604 A changeset that has been rewritten in multiple different ways is called
3604 A changeset that has been rewritten in multiple different ways is called
3605 "divergent". Such changesets have multiple successor sets (each of which
3605 "divergent". Such changesets have multiple successor sets (each of which
3606 may also be split, i.e. have multiple successors).
3606 may also be split, i.e. have multiple successors).
3607
3607
3608 Results are displayed as follows::
3608 Results are displayed as follows::
3609
3609
3610 <rev1>
3610 <rev1>
3611 <successors-1A>
3611 <successors-1A>
3612 <rev2>
3612 <rev2>
3613 <successors-2A>
3613 <successors-2A>
3614 <successors-2B1> <successors-2B2> <successors-2B3>
3614 <successors-2B1> <successors-2B2> <successors-2B3>
3615
3615
3616 Here rev2 has two possible (i.e. divergent) successors sets. The first
3616 Here rev2 has two possible (i.e. divergent) successors sets. The first
3617 holds one element, whereas the second holds three (i.e. the changeset has
3617 holds one element, whereas the second holds three (i.e. the changeset has
3618 been split).
3618 been split).
3619 """
3619 """
3620 # passed to successorssets caching computation from one call to another
3620 # passed to successorssets caching computation from one call to another
3621 cache = {}
3621 cache = {}
3622 ctx2str = bytes
3622 ctx2str = bytes
3623 node2str = short
3623 node2str = short
3624 for rev in scmutil.revrange(repo, revs):
3624 for rev in scmutil.revrange(repo, revs):
3625 ctx = repo[rev]
3625 ctx = repo[rev]
3626 ui.write(b'%s\n' % ctx2str(ctx))
3626 ui.write(b'%s\n' % ctx2str(ctx))
3627 for succsset in obsutil.successorssets(
3627 for succsset in obsutil.successorssets(
3628 repo, ctx.node(), closest=opts['closest'], cache=cache
3628 repo, ctx.node(), closest=opts['closest'], cache=cache
3629 ):
3629 ):
3630 if succsset:
3630 if succsset:
3631 ui.write(b' ')
3631 ui.write(b' ')
3632 ui.write(node2str(succsset[0]))
3632 ui.write(node2str(succsset[0]))
3633 for node in succsset[1:]:
3633 for node in succsset[1:]:
3634 ui.write(b' ')
3634 ui.write(b' ')
3635 ui.write(node2str(node))
3635 ui.write(node2str(node))
3636 ui.write(b'\n')
3636 ui.write(b'\n')
3637
3637
3638
3638
3639 @command(b'debugtagscache', [])
3639 @command(b'debugtagscache', [])
3640 def debugtagscache(ui, repo):
3640 def debugtagscache(ui, repo):
3641 """display the contents of .hg/cache/hgtagsfnodes1"""
3641 """display the contents of .hg/cache/hgtagsfnodes1"""
3642 cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
3642 cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
3643 for r in repo:
3643 for r in repo:
3644 node = repo[r].node()
3644 node = repo[r].node()
3645 tagsnode = cache.getfnode(node, computemissing=False)
3645 tagsnode = cache.getfnode(node, computemissing=False)
3646 tagsnodedisplay = hex(tagsnode) if tagsnode else b'missing/invalid'
3646 tagsnodedisplay = hex(tagsnode) if tagsnode else b'missing/invalid'
3647 ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay))
3647 ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay))
3648
3648
3649
3649
3650 @command(
3650 @command(
3651 b'debugtemplate',
3651 b'debugtemplate',
3652 [
3652 [
3653 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
3653 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
3654 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
3654 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
3655 ],
3655 ],
3656 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
3656 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
3657 optionalrepo=True,
3657 optionalrepo=True,
3658 )
3658 )
3659 def debugtemplate(ui, repo, tmpl, **opts):
3659 def debugtemplate(ui, repo, tmpl, **opts):
3660 """parse and apply a template
3660 """parse and apply a template
3661
3661
3662 If -r/--rev is given, the template is processed as a log template and
3662 If -r/--rev is given, the template is processed as a log template and
3663 applied to the given changesets. Otherwise, it is processed as a generic
3663 applied to the given changesets. Otherwise, it is processed as a generic
3664 template.
3664 template.
3665
3665
3666 Use --verbose to print the parsed tree.
3666 Use --verbose to print the parsed tree.
3667 """
3667 """
3668 revs = None
3668 revs = None
3669 if opts['rev']:
3669 if opts['rev']:
3670 if repo is None:
3670 if repo is None:
3671 raise error.RepoError(
3671 raise error.RepoError(
3672 _(b'there is no Mercurial repository here (.hg not found)')
3672 _(b'there is no Mercurial repository here (.hg not found)')
3673 )
3673 )
3674 revs = scmutil.revrange(repo, opts['rev'])
3674 revs = scmutil.revrange(repo, opts['rev'])
3675
3675
3676 props = {}
3676 props = {}
3677 for d in opts['define']:
3677 for d in opts['define']:
3678 try:
3678 try:
3679 k, v = (e.strip() for e in d.split(b'=', 1))
3679 k, v = (e.strip() for e in d.split(b'=', 1))
3680 if not k or k == b'ui':
3680 if not k or k == b'ui':
3681 raise ValueError
3681 raise ValueError
3682 props[k] = v
3682 props[k] = v
3683 except ValueError:
3683 except ValueError:
3684 raise error.Abort(_(b'malformed keyword definition: %s') % d)
3684 raise error.Abort(_(b'malformed keyword definition: %s') % d)
3685
3685
3686 if ui.verbose:
3686 if ui.verbose:
3687 aliases = ui.configitems(b'templatealias')
3687 aliases = ui.configitems(b'templatealias')
3688 tree = templater.parse(tmpl)
3688 tree = templater.parse(tmpl)
3689 ui.note(templater.prettyformat(tree), b'\n')
3689 ui.note(templater.prettyformat(tree), b'\n')
3690 newtree = templater.expandaliases(tree, aliases)
3690 newtree = templater.expandaliases(tree, aliases)
3691 if newtree != tree:
3691 if newtree != tree:
3692 ui.notenoi18n(
3692 ui.notenoi18n(
3693 b"* expanded:\n", templater.prettyformat(newtree), b'\n'
3693 b"* expanded:\n", templater.prettyformat(newtree), b'\n'
3694 )
3694 )
3695
3695
3696 if revs is None:
3696 if revs is None:
3697 tres = formatter.templateresources(ui, repo)
3697 tres = formatter.templateresources(ui, repo)
3698 t = formatter.maketemplater(ui, tmpl, resources=tres)
3698 t = formatter.maketemplater(ui, tmpl, resources=tres)
3699 if ui.verbose:
3699 if ui.verbose:
3700 kwds, funcs = t.symbolsuseddefault()
3700 kwds, funcs = t.symbolsuseddefault()
3701 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3701 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3702 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3702 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3703 ui.write(t.renderdefault(props))
3703 ui.write(t.renderdefault(props))
3704 else:
3704 else:
3705 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
3705 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
3706 if ui.verbose:
3706 if ui.verbose:
3707 kwds, funcs = displayer.t.symbolsuseddefault()
3707 kwds, funcs = displayer.t.symbolsuseddefault()
3708 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3708 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3709 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3709 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3710 for r in revs:
3710 for r in revs:
3711 displayer.show(repo[r], **pycompat.strkwargs(props))
3711 displayer.show(repo[r], **pycompat.strkwargs(props))
3712 displayer.close()
3712 displayer.close()
3713
3713
3714
3714
3715 @command(
3715 @command(
3716 b'debuguigetpass',
3716 b'debuguigetpass',
3717 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3717 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3718 _(b'[-p TEXT]'),
3718 _(b'[-p TEXT]'),
3719 norepo=True,
3719 norepo=True,
3720 )
3720 )
3721 def debuguigetpass(ui, prompt=b''):
3721 def debuguigetpass(ui, prompt=b''):
3722 """show prompt to type password"""
3722 """show prompt to type password"""
3723 r = ui.getpass(prompt)
3723 r = ui.getpass(prompt)
3724 ui.writenoi18n(b'respose: %s\n' % r)
3724 ui.writenoi18n(b'response: %s\n' % r)
3725
3725
3726
3726
3727 @command(
3727 @command(
3728 b'debuguiprompt',
3728 b'debuguiprompt',
3729 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3729 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3730 _(b'[-p TEXT]'),
3730 _(b'[-p TEXT]'),
3731 norepo=True,
3731 norepo=True,
3732 )
3732 )
3733 def debuguiprompt(ui, prompt=b''):
3733 def debuguiprompt(ui, prompt=b''):
3734 """show plain prompt"""
3734 """show plain prompt"""
3735 r = ui.prompt(prompt)
3735 r = ui.prompt(prompt)
3736 ui.writenoi18n(b'response: %s\n' % r)
3736 ui.writenoi18n(b'response: %s\n' % r)
3737
3737
3738
3738
3739 @command(b'debugupdatecaches', [])
3739 @command(b'debugupdatecaches', [])
3740 def debugupdatecaches(ui, repo, *pats, **opts):
3740 def debugupdatecaches(ui, repo, *pats, **opts):
3741 """warm all known caches in the repository"""
3741 """warm all known caches in the repository"""
3742 with repo.wlock(), repo.lock():
3742 with repo.wlock(), repo.lock():
3743 repo.updatecaches(full=True)
3743 repo.updatecaches(full=True)
3744
3744
3745
3745
3746 @command(
3746 @command(
3747 b'debugupgraderepo',
3747 b'debugupgraderepo',
3748 [
3748 [
3749 (
3749 (
3750 b'o',
3750 b'o',
3751 b'optimize',
3751 b'optimize',
3752 [],
3752 [],
3753 _(b'extra optimization to perform'),
3753 _(b'extra optimization to perform'),
3754 _(b'NAME'),
3754 _(b'NAME'),
3755 ),
3755 ),
3756 (b'', b'run', False, _(b'performs an upgrade')),
3756 (b'', b'run', False, _(b'performs an upgrade')),
3757 (b'', b'backup', True, _(b'keep the old repository content around')),
3757 (b'', b'backup', True, _(b'keep the old repository content around')),
3758 (b'', b'changelog', None, _(b'select the changelog for upgrade')),
3758 (b'', b'changelog', None, _(b'select the changelog for upgrade')),
3759 (b'', b'manifest', None, _(b'select the manifest for upgrade')),
3759 (b'', b'manifest', None, _(b'select the manifest for upgrade')),
3760 ],
3760 ],
3761 )
3761 )
3762 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
3762 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
3763 """upgrade a repository to use different features
3763 """upgrade a repository to use different features
3764
3764
3765 If no arguments are specified, the repository is evaluated for upgrade
3765 If no arguments are specified, the repository is evaluated for upgrade
3766 and a list of problems and potential optimizations is printed.
3766 and a list of problems and potential optimizations is printed.
3767
3767
3768 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
3768 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
3769 can be influenced via additional arguments. More details will be provided
3769 can be influenced via additional arguments. More details will be provided
3770 by the command output when run without ``--run``.
3770 by the command output when run without ``--run``.
3771
3771
3772 During the upgrade, the repository will be locked and no writes will be
3772 During the upgrade, the repository will be locked and no writes will be
3773 allowed.
3773 allowed.
3774
3774
3775 At the end of the upgrade, the repository may not be readable while new
3775 At the end of the upgrade, the repository may not be readable while new
3776 repository data is swapped in. This window will be as long as it takes to
3776 repository data is swapped in. This window will be as long as it takes to
3777 rename some directories inside the ``.hg`` directory. On most machines, this
3777 rename some directories inside the ``.hg`` directory. On most machines, this
3778 should complete almost instantaneously and the chances of a consumer being
3778 should complete almost instantaneously and the chances of a consumer being
3779 unable to access the repository should be low.
3779 unable to access the repository should be low.
3780
3780
3781 By default, all revlog will be upgraded. You can restrict this using flag
3781 By default, all revlog will be upgraded. You can restrict this using flag
3782 such as `--manifest`:
3782 such as `--manifest`:
3783
3783
3784 * `--manifest`: only optimize the manifest
3784 * `--manifest`: only optimize the manifest
3785 * `--no-manifest`: optimize all revlog but the manifest
3785 * `--no-manifest`: optimize all revlog but the manifest
3786 * `--changelog`: optimize the changelog only
3786 * `--changelog`: optimize the changelog only
3787 * `--no-changelog --no-manifest`: optimize filelogs only
3787 * `--no-changelog --no-manifest`: optimize filelogs only
3788 """
3788 """
3789 return upgrade.upgraderepo(
3789 return upgrade.upgraderepo(
3790 ui, repo, run=run, optimize=optimize, backup=backup, **opts
3790 ui, repo, run=run, optimize=optimize, backup=backup, **opts
3791 )
3791 )
3792
3792
3793
3793
3794 @command(
3794 @command(
3795 b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
3795 b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
3796 )
3796 )
3797 def debugwalk(ui, repo, *pats, **opts):
3797 def debugwalk(ui, repo, *pats, **opts):
3798 """show how files match on given patterns"""
3798 """show how files match on given patterns"""
3799 opts = pycompat.byteskwargs(opts)
3799 opts = pycompat.byteskwargs(opts)
3800 m = scmutil.match(repo[None], pats, opts)
3800 m = scmutil.match(repo[None], pats, opts)
3801 if ui.verbose:
3801 if ui.verbose:
3802 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
3802 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
3803 items = list(repo[None].walk(m))
3803 items = list(repo[None].walk(m))
3804 if not items:
3804 if not items:
3805 return
3805 return
3806 f = lambda fn: fn
3806 f = lambda fn: fn
3807 if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
3807 if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
3808 f = lambda fn: util.normpath(fn)
3808 f = lambda fn: util.normpath(fn)
3809 fmt = b'f %%-%ds %%-%ds %%s' % (
3809 fmt = b'f %%-%ds %%-%ds %%s' % (
3810 max([len(abs) for abs in items]),
3810 max([len(abs) for abs in items]),
3811 max([len(repo.pathto(abs)) for abs in items]),
3811 max([len(repo.pathto(abs)) for abs in items]),
3812 )
3812 )
3813 for abs in items:
3813 for abs in items:
3814 line = fmt % (
3814 line = fmt % (
3815 abs,
3815 abs,
3816 f(repo.pathto(abs)),
3816 f(repo.pathto(abs)),
3817 m.exact(abs) and b'exact' or b'',
3817 m.exact(abs) and b'exact' or b'',
3818 )
3818 )
3819 ui.write(b"%s\n" % line.rstrip())
3819 ui.write(b"%s\n" % line.rstrip())
3820
3820
3821
3821
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        # Render divergent nodes (if any) as "<hex> (<phase>)" pairs with a
        # trailing space so they slot into the output line below.
        divergent = entry.get(b'divergentnodes')
        if divergent:
            rendered = [
                b'%s (%s)' % (c.hex(), c.phasestr()) for c in divergent
            ]
            dnodes = b' '.join(rendered) + b' '
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
3839
3839
3840
3840
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    # Drop the generic remote options so only this command's own flags
    # remain in the argument dict.
    for remoteopt in cmdutil.remoteopts:
        del opts[remoteopt[1]]
    # Forward only options that were actually supplied (truthy values).
    args = pycompat.strkwargs(
        {k: v for k, v in pycompat.iteritems(opts) if v}
    )
    # run twice to check that we don't mess up the stream for the next command
    res1 = repo.debugwireargs(*vals, **args)
    res2 = repo.debugwireargs(*vals, **args)
    ui.write(b"%s\n" % res1)
    if res1 != res2:
        ui.warn(b"%s\n" % res2)
3868
3868
3869
3869
3870 def _parsewirelangblocks(fh):
3870 def _parsewirelangblocks(fh):
3871 activeaction = None
3871 activeaction = None
3872 blocklines = []
3872 blocklines = []
3873 lastindent = 0
3873 lastindent = 0
3874
3874
3875 for line in fh:
3875 for line in fh:
3876 line = line.rstrip()
3876 line = line.rstrip()
3877 if not line:
3877 if not line:
3878 continue
3878 continue
3879
3879
3880 if line.startswith(b'#'):
3880 if line.startswith(b'#'):
3881 continue
3881 continue
3882
3882
3883 if not line.startswith(b' '):
3883 if not line.startswith(b' '):
3884 # New block. Flush previous one.
3884 # New block. Flush previous one.
3885 if activeaction:
3885 if activeaction:
3886 yield activeaction, blocklines
3886 yield activeaction, blocklines
3887
3887
3888 activeaction = line
3888 activeaction = line
3889 blocklines = []
3889 blocklines = []
3890 lastindent = 0
3890 lastindent = 0
3891 continue
3891 continue
3892
3892
3893 # Else we start with an indent.
3893 # Else we start with an indent.
3894
3894
3895 if not activeaction:
3895 if not activeaction:
3896 raise error.Abort(_(b'indented line outside of block'))
3896 raise error.Abort(_(b'indented line outside of block'))
3897
3897
3898 indent = len(line) - len(line.lstrip())
3898 indent = len(line) - len(line.lstrip())
3899
3899
3900 # If this line is indented more than the last line, concatenate it.
3900 # If this line is indented more than the last line, concatenate it.
3901 if indent > lastindent and blocklines:
3901 if indent > lastindent and blocklines:
3902 blocklines[-1] += line.lstrip()
3902 blocklines[-1] += line.lstrip()
3903 else:
3903 else:
3904 blocklines.append(line)
3904 blocklines.append(line)
3905 lastindent = indent
3905 lastindent = indent
3906
3906
3907 # Flush last block.
3907 # Flush last block.
3908 if activeaction:
3908 if activeaction:
3909 yield activeaction, blocklines
3909 yield activeaction, blocklines
3910
3910
3911
3911
3912 @command(
3912 @command(
3913 b'debugwireproto',
3913 b'debugwireproto',
3914 [
3914 [
3915 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
3915 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
3916 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
3916 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
3917 (
3917 (
3918 b'',
3918 b'',
3919 b'noreadstderr',
3919 b'noreadstderr',
3920 False,
3920 False,
3921 _(b'do not read from stderr of the remote'),
3921 _(b'do not read from stderr of the remote'),
3922 ),
3922 ),
3923 (
3923 (
3924 b'',
3924 b'',
3925 b'nologhandshake',
3925 b'nologhandshake',
3926 False,
3926 False,
3927 _(b'do not log I/O related to the peer handshake'),
3927 _(b'do not log I/O related to the peer handshake'),
3928 ),
3928 ),
3929 ]
3929 ]
3930 + cmdutil.remoteopts,
3930 + cmdutil.remoteopts,
3931 _(b'[PATH]'),
3931 _(b'[PATH]'),
3932 optionalrepo=True,
3932 optionalrepo=True,
3933 )
3933 )
3934 def debugwireproto(ui, repo, path=None, **opts):
3934 def debugwireproto(ui, repo, path=None, **opts):
3935 """send wire protocol commands to a server
3935 """send wire protocol commands to a server
3936
3936
3937 This command can be used to issue wire protocol commands to remote
3937 This command can be used to issue wire protocol commands to remote
3938 peers and to debug the raw data being exchanged.
3938 peers and to debug the raw data being exchanged.
3939
3939
3940 ``--localssh`` will start an SSH server against the current repository
3940 ``--localssh`` will start an SSH server against the current repository
3941 and connect to that. By default, the connection will perform a handshake
3941 and connect to that. By default, the connection will perform a handshake
3942 and establish an appropriate peer instance.
3942 and establish an appropriate peer instance.
3943
3943
3944 ``--peer`` can be used to bypass the handshake protocol and construct a
3944 ``--peer`` can be used to bypass the handshake protocol and construct a
3945 peer instance using the specified class type. Valid values are ``raw``,
3945 peer instance using the specified class type. Valid values are ``raw``,
3946 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
3946 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
3947 raw data payloads and don't support higher-level command actions.
3947 raw data payloads and don't support higher-level command actions.
3948
3948
3949 ``--noreadstderr`` can be used to disable automatic reading from stderr
3949 ``--noreadstderr`` can be used to disable automatic reading from stderr
3950 of the peer (for SSH connections only). Disabling automatic reading of
3950 of the peer (for SSH connections only). Disabling automatic reading of
3951 stderr is useful for making output more deterministic.
3951 stderr is useful for making output more deterministic.
3952
3952
3953 Commands are issued via a mini language which is specified via stdin.
3953 Commands are issued via a mini language which is specified via stdin.
3954 The language consists of individual actions to perform. An action is
3954 The language consists of individual actions to perform. An action is
3955 defined by a block. A block is defined as a line with no leading
3955 defined by a block. A block is defined as a line with no leading
3956 space followed by 0 or more lines with leading space. Blocks are
3956 space followed by 0 or more lines with leading space. Blocks are
3957 effectively a high-level command with additional metadata.
3957 effectively a high-level command with additional metadata.
3958
3958
3959 Lines beginning with ``#`` are ignored.
3959 Lines beginning with ``#`` are ignored.
3960
3960
3961 The following sections denote available actions.
3961 The following sections denote available actions.
3962
3962
3963 raw
3963 raw
3964 ---
3964 ---
3965
3965
3966 Send raw data to the server.
3966 Send raw data to the server.
3967
3967
3968 The block payload contains the raw data to send as one atomic send
3968 The block payload contains the raw data to send as one atomic send
3969 operation. The data may not actually be delivered in a single system
3969 operation. The data may not actually be delivered in a single system
3970 call: it depends on the abilities of the transport being used.
3970 call: it depends on the abilities of the transport being used.
3971
3971
3972 Each line in the block is de-indented and concatenated. Then, that
3972 Each line in the block is de-indented and concatenated. Then, that
3973 value is evaluated as a Python b'' literal. This allows the use of
3973 value is evaluated as a Python b'' literal. This allows the use of
3974 backslash escaping, etc.
3974 backslash escaping, etc.
3975
3975
3976 raw+
3976 raw+
3977 ----
3977 ----
3978
3978
3979 Behaves like ``raw`` except flushes output afterwards.
3979 Behaves like ``raw`` except flushes output afterwards.
3980
3980
3981 command <X>
3981 command <X>
3982 -----------
3982 -----------
3983
3983
3984 Send a request to run a named command, whose name follows the ``command``
3984 Send a request to run a named command, whose name follows the ``command``
3985 string.
3985 string.
3986
3986
3987 Arguments to the command are defined as lines in this block. The format of
3987 Arguments to the command are defined as lines in this block. The format of
3988 each line is ``<key> <value>``. e.g.::
3988 each line is ``<key> <value>``. e.g.::
3989
3989
3990 command listkeys
3990 command listkeys
3991 namespace bookmarks
3991 namespace bookmarks
3992
3992
3993 If the value begins with ``eval:``, it will be interpreted as a Python
3993 If the value begins with ``eval:``, it will be interpreted as a Python
3994 literal expression. Otherwise values are interpreted as Python b'' literals.
3994 literal expression. Otherwise values are interpreted as Python b'' literals.
3995 This allows sending complex types and encoding special byte sequences via
3995 This allows sending complex types and encoding special byte sequences via
3996 backslash escaping.
3996 backslash escaping.
3997
3997
3998 The following arguments have special meaning:
3998 The following arguments have special meaning:
3999
3999
4000 ``PUSHFILE``
4000 ``PUSHFILE``
4001 When defined, the *push* mechanism of the peer will be used instead
4001 When defined, the *push* mechanism of the peer will be used instead
4002 of the static request-response mechanism and the content of the
4002 of the static request-response mechanism and the content of the
4003 file specified in the value of this argument will be sent as the
4003 file specified in the value of this argument will be sent as the
4004 command payload.
4004 command payload.
4005
4005
4006 This can be used to submit a local bundle file to the remote.
4006 This can be used to submit a local bundle file to the remote.
4007
4007
4008 batchbegin
4008 batchbegin
4009 ----------
4009 ----------
4010
4010
4011 Instruct the peer to begin a batched send.
4011 Instruct the peer to begin a batched send.
4012
4012
4013 All ``command`` blocks are queued for execution until the next
4013 All ``command`` blocks are queued for execution until the next
4014 ``batchsubmit`` block.
4014 ``batchsubmit`` block.
4015
4015
4016 batchsubmit
4016 batchsubmit
4017 -----------
4017 -----------
4018
4018
4019 Submit previously queued ``command`` blocks as a batch request.
4019 Submit previously queued ``command`` blocks as a batch request.
4020
4020
4021 This action MUST be paired with a ``batchbegin`` action.
4021 This action MUST be paired with a ``batchbegin`` action.
4022
4022
4023 httprequest <method> <path>
4023 httprequest <method> <path>
4024 ---------------------------
4024 ---------------------------
4025
4025
4026 (HTTP peer only)
4026 (HTTP peer only)
4027
4027
4028 Send an HTTP request to the peer.
4028 Send an HTTP request to the peer.
4029
4029
4030 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4030 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4031
4031
4032 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4032 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4033 headers to add to the request. e.g. ``Accept: foo``.
4033 headers to add to the request. e.g. ``Accept: foo``.
4034
4034
4035 The following arguments are special:
4035 The following arguments are special:
4036
4036
4037 ``BODYFILE``
4037 ``BODYFILE``
4038 The content of the file defined as the value to this argument will be
4038 The content of the file defined as the value to this argument will be
4039 transferred verbatim as the HTTP request body.
4039 transferred verbatim as the HTTP request body.
4040
4040
4041 ``frame <type> <flags> <payload>``
4041 ``frame <type> <flags> <payload>``
4042 Send a unified protocol frame as part of the request body.
4042 Send a unified protocol frame as part of the request body.
4043
4043
4044 All frames will be collected and sent as the body to the HTTP
4044 All frames will be collected and sent as the body to the HTTP
4045 request.
4045 request.
4046
4046
4047 close
4047 close
4048 -----
4048 -----
4049
4049
4050 Close the connection to the server.
4050 Close the connection to the server.
4051
4051
4052 flush
4052 flush
4053 -----
4053 -----
4054
4054
4055 Flush data written to the server.
4055 Flush data written to the server.
4056
4056
4057 readavailable
4057 readavailable
4058 -------------
4058 -------------
4059
4059
4060 Close the write end of the connection and read all available data from
4060 Close the write end of the connection and read all available data from
4061 the server.
4061 the server.
4062
4062
4063 If the connection to the server encompasses multiple pipes, we poll both
4063 If the connection to the server encompasses multiple pipes, we poll both
4064 pipes and read available data.
4064 pipes and read available data.
4065
4065
4066 readline
4066 readline
4067 --------
4067 --------
4068
4068
4069 Read a line of output from the server. If there are multiple output
4069 Read a line of output from the server. If there are multiple output
4070 pipes, reads only the main pipe.
4070 pipes, reads only the main pipe.
4071
4071
4072 ereadline
4072 ereadline
4073 ---------
4073 ---------
4074
4074
4075 Like ``readline``, but read from the stderr pipe, if available.
4075 Like ``readline``, but read from the stderr pipe, if available.
4076
4076
4077 read <X>
4077 read <X>
4078 --------
4078 --------
4079
4079
4080 ``read()`` N bytes from the server's main output pipe.
4080 ``read()`` N bytes from the server's main output pipe.
4081
4081
4082 eread <X>
4082 eread <X>
4083 ---------
4083 ---------
4084
4084
4085 ``read()`` N bytes from the server's stderr pipe, if available.
4085 ``read()`` N bytes from the server's stderr pipe, if available.
4086
4086
4087 Specifying Unified Frame-Based Protocol Frames
4087 Specifying Unified Frame-Based Protocol Frames
4088 ----------------------------------------------
4088 ----------------------------------------------
4089
4089
4090 It is possible to emit a *Unified Frame-Based Protocol* by using special
4090 It is possible to emit a *Unified Frame-Based Protocol* by using special
4091 syntax.
4091 syntax.
4092
4092
4093 A frame is composed as a type, flags, and payload. These can be parsed
4093 A frame is composed as a type, flags, and payload. These can be parsed
4094 from a string of the form:
4094 from a string of the form:
4095
4095
4096 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4096 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4097
4097
4098 ``request-id`` and ``stream-id`` are integers defining the request and
4098 ``request-id`` and ``stream-id`` are integers defining the request and
4099 stream identifiers.
4099 stream identifiers.
4100
4100
4101 ``type`` can be an integer value for the frame type or the string name
4101 ``type`` can be an integer value for the frame type or the string name
4102 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4102 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4103 ``command-name``.
4103 ``command-name``.
4104
4104
4105 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4105 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4106 components. Each component (and there can be just one) can be an integer
4106 components. Each component (and there can be just one) can be an integer
4107 or a flag name for stream flags or frame flags, respectively. Values are
4107 or a flag name for stream flags or frame flags, respectively. Values are
4108 resolved to integers and then bitwise OR'd together.
4108 resolved to integers and then bitwise OR'd together.
4109
4109
4110 ``payload`` represents the raw frame payload. If it begins with
4110 ``payload`` represents the raw frame payload. If it begins with
4111 ``cbor:``, the following string is evaluated as Python code and the
4111 ``cbor:``, the following string is evaluated as Python code and the
4112 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4112 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4113 as a Python byte string literal.
4113 as a Python byte string literal.
4114 """
4114 """
4115 opts = pycompat.byteskwargs(opts)
4115 opts = pycompat.byteskwargs(opts)
4116
4116
4117 if opts[b'localssh'] and not repo:
4117 if opts[b'localssh'] and not repo:
4118 raise error.Abort(_(b'--localssh requires a repository'))
4118 raise error.Abort(_(b'--localssh requires a repository'))
4119
4119
4120 if opts[b'peer'] and opts[b'peer'] not in (
4120 if opts[b'peer'] and opts[b'peer'] not in (
4121 b'raw',
4121 b'raw',
4122 b'http2',
4122 b'http2',
4123 b'ssh1',
4123 b'ssh1',
4124 b'ssh2',
4124 b'ssh2',
4125 ):
4125 ):
4126 raise error.Abort(
4126 raise error.Abort(
4127 _(b'invalid value for --peer'),
4127 _(b'invalid value for --peer'),
4128 hint=_(b'valid values are "raw", "ssh1", and "ssh2"'),
4128 hint=_(b'valid values are "raw", "ssh1", and "ssh2"'),
4129 )
4129 )
4130
4130
4131 if path and opts[b'localssh']:
4131 if path and opts[b'localssh']:
4132 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4132 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4133
4133
4134 if ui.interactive():
4134 if ui.interactive():
4135 ui.write(_(b'(waiting for commands on stdin)\n'))
4135 ui.write(_(b'(waiting for commands on stdin)\n'))
4136
4136
4137 blocks = list(_parsewirelangblocks(ui.fin))
4137 blocks = list(_parsewirelangblocks(ui.fin))
4138
4138
4139 proc = None
4139 proc = None
4140 stdin = None
4140 stdin = None
4141 stdout = None
4141 stdout = None
4142 stderr = None
4142 stderr = None
4143 opener = None
4143 opener = None
4144
4144
4145 if opts[b'localssh']:
4145 if opts[b'localssh']:
4146 # We start the SSH server in its own process so there is process
4146 # We start the SSH server in its own process so there is process
4147 # separation. This prevents a whole class of potential bugs around
4147 # separation. This prevents a whole class of potential bugs around
4148 # shared state from interfering with server operation.
4148 # shared state from interfering with server operation.
4149 args = procutil.hgcmd() + [
4149 args = procutil.hgcmd() + [
4150 b'-R',
4150 b'-R',
4151 repo.root,
4151 repo.root,
4152 b'debugserve',
4152 b'debugserve',
4153 b'--sshstdio',
4153 b'--sshstdio',
4154 ]
4154 ]
4155 proc = subprocess.Popen(
4155 proc = subprocess.Popen(
4156 pycompat.rapply(procutil.tonativestr, args),
4156 pycompat.rapply(procutil.tonativestr, args),
4157 stdin=subprocess.PIPE,
4157 stdin=subprocess.PIPE,
4158 stdout=subprocess.PIPE,
4158 stdout=subprocess.PIPE,
4159 stderr=subprocess.PIPE,
4159 stderr=subprocess.PIPE,
4160 bufsize=0,
4160 bufsize=0,
4161 )
4161 )
4162
4162
4163 stdin = proc.stdin
4163 stdin = proc.stdin
4164 stdout = proc.stdout
4164 stdout = proc.stdout
4165 stderr = proc.stderr
4165 stderr = proc.stderr
4166
4166
4167 # We turn the pipes into observers so we can log I/O.
4167 # We turn the pipes into observers so we can log I/O.
4168 if ui.verbose or opts[b'peer'] == b'raw':
4168 if ui.verbose or opts[b'peer'] == b'raw':
4169 stdin = util.makeloggingfileobject(
4169 stdin = util.makeloggingfileobject(
4170 ui, proc.stdin, b'i', logdata=True
4170 ui, proc.stdin, b'i', logdata=True
4171 )
4171 )
4172 stdout = util.makeloggingfileobject(
4172 stdout = util.makeloggingfileobject(
4173 ui, proc.stdout, b'o', logdata=True
4173 ui, proc.stdout, b'o', logdata=True
4174 )
4174 )
4175 stderr = util.makeloggingfileobject(
4175 stderr = util.makeloggingfileobject(
4176 ui, proc.stderr, b'e', logdata=True
4176 ui, proc.stderr, b'e', logdata=True
4177 )
4177 )
4178
4178
4179 # --localssh also implies the peer connection settings.
4179 # --localssh also implies the peer connection settings.
4180
4180
4181 url = b'ssh://localserver'
4181 url = b'ssh://localserver'
4182 autoreadstderr = not opts[b'noreadstderr']
4182 autoreadstderr = not opts[b'noreadstderr']
4183
4183
4184 if opts[b'peer'] == b'ssh1':
4184 if opts[b'peer'] == b'ssh1':
4185 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4185 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4186 peer = sshpeer.sshv1peer(
4186 peer = sshpeer.sshv1peer(
4187 ui,
4187 ui,
4188 url,
4188 url,
4189 proc,
4189 proc,
4190 stdin,
4190 stdin,
4191 stdout,
4191 stdout,
4192 stderr,
4192 stderr,
4193 None,
4193 None,
4194 autoreadstderr=autoreadstderr,
4194 autoreadstderr=autoreadstderr,
4195 )
4195 )
4196 elif opts[b'peer'] == b'ssh2':
4196 elif opts[b'peer'] == b'ssh2':
4197 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
4197 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
4198 peer = sshpeer.sshv2peer(
4198 peer = sshpeer.sshv2peer(
4199 ui,
4199 ui,
4200 url,
4200 url,
4201 proc,
4201 proc,
4202 stdin,
4202 stdin,
4203 stdout,
4203 stdout,
4204 stderr,
4204 stderr,
4205 None,
4205 None,
4206 autoreadstderr=autoreadstderr,
4206 autoreadstderr=autoreadstderr,
4207 )
4207 )
4208 elif opts[b'peer'] == b'raw':
4208 elif opts[b'peer'] == b'raw':
4209 ui.write(_(b'using raw connection to peer\n'))
4209 ui.write(_(b'using raw connection to peer\n'))
4210 peer = None
4210 peer = None
4211 else:
4211 else:
4212 ui.write(_(b'creating ssh peer from handshake results\n'))
4212 ui.write(_(b'creating ssh peer from handshake results\n'))
4213 peer = sshpeer.makepeer(
4213 peer = sshpeer.makepeer(
4214 ui,
4214 ui,
4215 url,
4215 url,
4216 proc,
4216 proc,
4217 stdin,
4217 stdin,
4218 stdout,
4218 stdout,
4219 stderr,
4219 stderr,
4220 autoreadstderr=autoreadstderr,
4220 autoreadstderr=autoreadstderr,
4221 )
4221 )
4222
4222
4223 elif path:
4223 elif path:
4224 # We bypass hg.peer() so we can proxy the sockets.
4224 # We bypass hg.peer() so we can proxy the sockets.
4225 # TODO consider not doing this because we skip
4225 # TODO consider not doing this because we skip
4226 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4226 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4227 u = util.url(path)
4227 u = util.url(path)
4228 if u.scheme != b'http':
4228 if u.scheme != b'http':
4229 raise error.Abort(_(b'only http:// paths are currently supported'))
4229 raise error.Abort(_(b'only http:// paths are currently supported'))
4230
4230
4231 url, authinfo = u.authinfo()
4231 url, authinfo = u.authinfo()
4232 openerargs = {
4232 openerargs = {
4233 'useragent': b'Mercurial debugwireproto',
4233 'useragent': b'Mercurial debugwireproto',
4234 }
4234 }
4235
4235
4236 # Turn pipes/sockets into observers so we can log I/O.
4236 # Turn pipes/sockets into observers so we can log I/O.
4237 if ui.verbose:
4237 if ui.verbose:
4238 openerargs.update(
4238 openerargs.update(
4239 {
4239 {
4240 'loggingfh': ui,
4240 'loggingfh': ui,
4241 'loggingname': b's',
4241 'loggingname': b's',
4242 'loggingopts': {'logdata': True, 'logdataapis': False,},
4242 'loggingopts': {'logdata': True, 'logdataapis': False,},
4243 }
4243 }
4244 )
4244 )
4245
4245
4246 if ui.debugflag:
4246 if ui.debugflag:
4247 openerargs['loggingopts']['logdataapis'] = True
4247 openerargs['loggingopts']['logdataapis'] = True
4248
4248
4249 # Don't send default headers when in raw mode. This allows us to
4249 # Don't send default headers when in raw mode. This allows us to
4250 # bypass most of the behavior of our URL handling code so we can
4250 # bypass most of the behavior of our URL handling code so we can
4251 # have near complete control over what's sent on the wire.
4251 # have near complete control over what's sent on the wire.
4252 if opts[b'peer'] == b'raw':
4252 if opts[b'peer'] == b'raw':
4253 openerargs['sendaccept'] = False
4253 openerargs['sendaccept'] = False
4254
4254
4255 opener = urlmod.opener(ui, authinfo, **openerargs)
4255 opener = urlmod.opener(ui, authinfo, **openerargs)
4256
4256
4257 if opts[b'peer'] == b'http2':
4257 if opts[b'peer'] == b'http2':
4258 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4258 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4259 # We go through makepeer() because we need an API descriptor for
4259 # We go through makepeer() because we need an API descriptor for
4260 # the peer instance to be useful.
4260 # the peer instance to be useful.
4261 with ui.configoverride(
4261 with ui.configoverride(
4262 {(b'experimental', b'httppeer.advertise-v2'): True}
4262 {(b'experimental', b'httppeer.advertise-v2'): True}
4263 ):
4263 ):
4264 if opts[b'nologhandshake']:
4264 if opts[b'nologhandshake']:
4265 ui.pushbuffer()
4265 ui.pushbuffer()
4266
4266
4267 peer = httppeer.makepeer(ui, path, opener=opener)
4267 peer = httppeer.makepeer(ui, path, opener=opener)
4268
4268
4269 if opts[b'nologhandshake']:
4269 if opts[b'nologhandshake']:
4270 ui.popbuffer()
4270 ui.popbuffer()
4271
4271
4272 if not isinstance(peer, httppeer.httpv2peer):
4272 if not isinstance(peer, httppeer.httpv2peer):
4273 raise error.Abort(
4273 raise error.Abort(
4274 _(
4274 _(
4275 b'could not instantiate HTTP peer for '
4275 b'could not instantiate HTTP peer for '
4276 b'wire protocol version 2'
4276 b'wire protocol version 2'
4277 ),
4277 ),
4278 hint=_(
4278 hint=_(
4279 b'the server may not have the feature '
4279 b'the server may not have the feature '
4280 b'enabled or is not allowing this '
4280 b'enabled or is not allowing this '
4281 b'client version'
4281 b'client version'
4282 ),
4282 ),
4283 )
4283 )
4284
4284
4285 elif opts[b'peer'] == b'raw':
4285 elif opts[b'peer'] == b'raw':
4286 ui.write(_(b'using raw connection to peer\n'))
4286 ui.write(_(b'using raw connection to peer\n'))
4287 peer = None
4287 peer = None
4288 elif opts[b'peer']:
4288 elif opts[b'peer']:
4289 raise error.Abort(
4289 raise error.Abort(
4290 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4290 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4291 )
4291 )
4292 else:
4292 else:
4293 peer = httppeer.makepeer(ui, path, opener=opener)
4293 peer = httppeer.makepeer(ui, path, opener=opener)
4294
4294
4295 # We /could/ populate stdin/stdout with sock.makefile()...
4295 # We /could/ populate stdin/stdout with sock.makefile()...
4296 else:
4296 else:
4297 raise error.Abort(_(b'unsupported connection configuration'))
4297 raise error.Abort(_(b'unsupported connection configuration'))
4298
4298
4299 batchedcommands = None
4299 batchedcommands = None
4300
4300
4301 # Now perform actions based on the parsed wire language instructions.
4301 # Now perform actions based on the parsed wire language instructions.
4302 for action, lines in blocks:
4302 for action, lines in blocks:
4303 if action in (b'raw', b'raw+'):
4303 if action in (b'raw', b'raw+'):
4304 if not stdin:
4304 if not stdin:
4305 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4305 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4306
4306
4307 # Concatenate the data together.
4307 # Concatenate the data together.
4308 data = b''.join(l.lstrip() for l in lines)
4308 data = b''.join(l.lstrip() for l in lines)
4309 data = stringutil.unescapestr(data)
4309 data = stringutil.unescapestr(data)
4310 stdin.write(data)
4310 stdin.write(data)
4311
4311
4312 if action == b'raw+':
4312 if action == b'raw+':
4313 stdin.flush()
4313 stdin.flush()
4314 elif action == b'flush':
4314 elif action == b'flush':
4315 if not stdin:
4315 if not stdin:
4316 raise error.Abort(_(b'cannot call flush on this peer'))
4316 raise error.Abort(_(b'cannot call flush on this peer'))
4317 stdin.flush()
4317 stdin.flush()
4318 elif action.startswith(b'command'):
4318 elif action.startswith(b'command'):
4319 if not peer:
4319 if not peer:
4320 raise error.Abort(
4320 raise error.Abort(
4321 _(
4321 _(
4322 b'cannot send commands unless peer instance '
4322 b'cannot send commands unless peer instance '
4323 b'is available'
4323 b'is available'
4324 )
4324 )
4325 )
4325 )
4326
4326
4327 command = action.split(b' ', 1)[1]
4327 command = action.split(b' ', 1)[1]
4328
4328
4329 args = {}
4329 args = {}
4330 for line in lines:
4330 for line in lines:
4331 # We need to allow empty values.
4331 # We need to allow empty values.
4332 fields = line.lstrip().split(b' ', 1)
4332 fields = line.lstrip().split(b' ', 1)
4333 if len(fields) == 1:
4333 if len(fields) == 1:
4334 key = fields[0]
4334 key = fields[0]
4335 value = b''
4335 value = b''
4336 else:
4336 else:
4337 key, value = fields
4337 key, value = fields
4338
4338
4339 if value.startswith(b'eval:'):
4339 if value.startswith(b'eval:'):
4340 value = stringutil.evalpythonliteral(value[5:])
4340 value = stringutil.evalpythonliteral(value[5:])
4341 else:
4341 else:
4342 value = stringutil.unescapestr(value)
4342 value = stringutil.unescapestr(value)
4343
4343
4344 args[key] = value
4344 args[key] = value
4345
4345
4346 if batchedcommands is not None:
4346 if batchedcommands is not None:
4347 batchedcommands.append((command, args))
4347 batchedcommands.append((command, args))
4348 continue
4348 continue
4349
4349
4350 ui.status(_(b'sending %s command\n') % command)
4350 ui.status(_(b'sending %s command\n') % command)
4351
4351
4352 if b'PUSHFILE' in args:
4352 if b'PUSHFILE' in args:
4353 with open(args[b'PUSHFILE'], 'rb') as fh:
4353 with open(args[b'PUSHFILE'], 'rb') as fh:
4354 del args[b'PUSHFILE']
4354 del args[b'PUSHFILE']
4355 res, output = peer._callpush(
4355 res, output = peer._callpush(
4356 command, fh, **pycompat.strkwargs(args)
4356 command, fh, **pycompat.strkwargs(args)
4357 )
4357 )
4358 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4358 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4359 ui.status(
4359 ui.status(
4360 _(b'remote output: %s\n') % stringutil.escapestr(output)
4360 _(b'remote output: %s\n') % stringutil.escapestr(output)
4361 )
4361 )
4362 else:
4362 else:
4363 with peer.commandexecutor() as e:
4363 with peer.commandexecutor() as e:
4364 res = e.callcommand(command, args).result()
4364 res = e.callcommand(command, args).result()
4365
4365
4366 if isinstance(res, wireprotov2peer.commandresponse):
4366 if isinstance(res, wireprotov2peer.commandresponse):
4367 val = res.objects()
4367 val = res.objects()
4368 ui.status(
4368 ui.status(
4369 _(b'response: %s\n')
4369 _(b'response: %s\n')
4370 % stringutil.pprint(val, bprefix=True, indent=2)
4370 % stringutil.pprint(val, bprefix=True, indent=2)
4371 )
4371 )
4372 else:
4372 else:
4373 ui.status(
4373 ui.status(
4374 _(b'response: %s\n')
4374 _(b'response: %s\n')
4375 % stringutil.pprint(res, bprefix=True, indent=2)
4375 % stringutil.pprint(res, bprefix=True, indent=2)
4376 )
4376 )
4377
4377
4378 elif action == b'batchbegin':
4378 elif action == b'batchbegin':
4379 if batchedcommands is not None:
4379 if batchedcommands is not None:
4380 raise error.Abort(_(b'nested batchbegin not allowed'))
4380 raise error.Abort(_(b'nested batchbegin not allowed'))
4381
4381
4382 batchedcommands = []
4382 batchedcommands = []
4383 elif action == b'batchsubmit':
4383 elif action == b'batchsubmit':
4384 # There is a batching API we could go through. But it would be
4384 # There is a batching API we could go through. But it would be
4385 # difficult to normalize requests into function calls. It is easier
4385 # difficult to normalize requests into function calls. It is easier
4386 # to bypass this layer and normalize to commands + args.
4386 # to bypass this layer and normalize to commands + args.
4387 ui.status(
4387 ui.status(
4388 _(b'sending batch with %d sub-commands\n')
4388 _(b'sending batch with %d sub-commands\n')
4389 % len(batchedcommands)
4389 % len(batchedcommands)
4390 )
4390 )
4391 assert peer is not None
4391 assert peer is not None
4392 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4392 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4393 ui.status(
4393 ui.status(
4394 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4394 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4395 )
4395 )
4396
4396
4397 batchedcommands = None
4397 batchedcommands = None
4398
4398
4399 elif action.startswith(b'httprequest '):
4399 elif action.startswith(b'httprequest '):
4400 if not opener:
4400 if not opener:
4401 raise error.Abort(
4401 raise error.Abort(
4402 _(b'cannot use httprequest without an HTTP peer')
4402 _(b'cannot use httprequest without an HTTP peer')
4403 )
4403 )
4404
4404
4405 request = action.split(b' ', 2)
4405 request = action.split(b' ', 2)
4406 if len(request) != 3:
4406 if len(request) != 3:
4407 raise error.Abort(
4407 raise error.Abort(
4408 _(
4408 _(
4409 b'invalid httprequest: expected format is '
4409 b'invalid httprequest: expected format is '
4410 b'"httprequest <method> <path>'
4410 b'"httprequest <method> <path>'
4411 )
4411 )
4412 )
4412 )
4413
4413
4414 method, httppath = request[1:]
4414 method, httppath = request[1:]
4415 headers = {}
4415 headers = {}
4416 body = None
4416 body = None
4417 frames = []
4417 frames = []
4418 for line in lines:
4418 for line in lines:
4419 line = line.lstrip()
4419 line = line.lstrip()
4420 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4420 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4421 if m:
4421 if m:
4422 # Headers need to use native strings.
4422 # Headers need to use native strings.
4423 key = pycompat.strurl(m.group(1))
4423 key = pycompat.strurl(m.group(1))
4424 value = pycompat.strurl(m.group(2))
4424 value = pycompat.strurl(m.group(2))
4425 headers[key] = value
4425 headers[key] = value
4426 continue
4426 continue
4427
4427
4428 if line.startswith(b'BODYFILE '):
4428 if line.startswith(b'BODYFILE '):
4429 with open(line.split(b' ', 1), b'rb') as fh:
4429 with open(line.split(b' ', 1), b'rb') as fh:
4430 body = fh.read()
4430 body = fh.read()
4431 elif line.startswith(b'frame '):
4431 elif line.startswith(b'frame '):
4432 frame = wireprotoframing.makeframefromhumanstring(
4432 frame = wireprotoframing.makeframefromhumanstring(
4433 line[len(b'frame ') :]
4433 line[len(b'frame ') :]
4434 )
4434 )
4435
4435
4436 frames.append(frame)
4436 frames.append(frame)
4437 else:
4437 else:
4438 raise error.Abort(
4438 raise error.Abort(
4439 _(b'unknown argument to httprequest: %s') % line
4439 _(b'unknown argument to httprequest: %s') % line
4440 )
4440 )
4441
4441
4442 url = path + httppath
4442 url = path + httppath
4443
4443
4444 if frames:
4444 if frames:
4445 body = b''.join(bytes(f) for f in frames)
4445 body = b''.join(bytes(f) for f in frames)
4446
4446
4447 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4447 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4448
4448
4449 # urllib.Request insists on using has_data() as a proxy for
4449 # urllib.Request insists on using has_data() as a proxy for
4450 # determining the request method. Override that to use our
4450 # determining the request method. Override that to use our
4451 # explicitly requested method.
4451 # explicitly requested method.
4452 req.get_method = lambda: pycompat.sysstr(method)
4452 req.get_method = lambda: pycompat.sysstr(method)
4453
4453
4454 try:
4454 try:
4455 res = opener.open(req)
4455 res = opener.open(req)
4456 body = res.read()
4456 body = res.read()
4457 except util.urlerr.urlerror as e:
4457 except util.urlerr.urlerror as e:
4458 # read() method must be called, but only exists in Python 2
4458 # read() method must be called, but only exists in Python 2
4459 getattr(e, 'read', lambda: None)()
4459 getattr(e, 'read', lambda: None)()
4460 continue
4460 continue
4461
4461
4462 ct = res.headers.get('Content-Type')
4462 ct = res.headers.get('Content-Type')
4463 if ct == 'application/mercurial-cbor':
4463 if ct == 'application/mercurial-cbor':
4464 ui.write(
4464 ui.write(
4465 _(b'cbor> %s\n')
4465 _(b'cbor> %s\n')
4466 % stringutil.pprint(
4466 % stringutil.pprint(
4467 cborutil.decodeall(body), bprefix=True, indent=2
4467 cborutil.decodeall(body), bprefix=True, indent=2
4468 )
4468 )
4469 )
4469 )
4470
4470
4471 elif action == b'close':
4471 elif action == b'close':
4472 assert peer is not None
4472 assert peer is not None
4473 peer.close()
4473 peer.close()
4474 elif action == b'readavailable':
4474 elif action == b'readavailable':
4475 if not stdout or not stderr:
4475 if not stdout or not stderr:
4476 raise error.Abort(
4476 raise error.Abort(
4477 _(b'readavailable not available on this peer')
4477 _(b'readavailable not available on this peer')
4478 )
4478 )
4479
4479
4480 stdin.close()
4480 stdin.close()
4481 stdout.read()
4481 stdout.read()
4482 stderr.read()
4482 stderr.read()
4483
4483
4484 elif action == b'readline':
4484 elif action == b'readline':
4485 if not stdout:
4485 if not stdout:
4486 raise error.Abort(_(b'readline not available on this peer'))
4486 raise error.Abort(_(b'readline not available on this peer'))
4487 stdout.readline()
4487 stdout.readline()
4488 elif action == b'ereadline':
4488 elif action == b'ereadline':
4489 if not stderr:
4489 if not stderr:
4490 raise error.Abort(_(b'ereadline not available on this peer'))
4490 raise error.Abort(_(b'ereadline not available on this peer'))
4491 stderr.readline()
4491 stderr.readline()
4492 elif action.startswith(b'read '):
4492 elif action.startswith(b'read '):
4493 count = int(action.split(b' ', 1)[1])
4493 count = int(action.split(b' ', 1)[1])
4494 if not stdout:
4494 if not stdout:
4495 raise error.Abort(_(b'read not available on this peer'))
4495 raise error.Abort(_(b'read not available on this peer'))
4496 stdout.read(count)
4496 stdout.read(count)
4497 elif action.startswith(b'eread '):
4497 elif action.startswith(b'eread '):
4498 count = int(action.split(b' ', 1)[1])
4498 count = int(action.split(b' ', 1)[1])
4499 if not stderr:
4499 if not stderr:
4500 raise error.Abort(_(b'eread not available on this peer'))
4500 raise error.Abort(_(b'eread not available on this peer'))
4501 stderr.read(count)
4501 stderr.read(count)
4502 else:
4502 else:
4503 raise error.Abort(_(b'unknown action: %s') % action)
4503 raise error.Abort(_(b'unknown action: %s') % action)
4504
4504
4505 if batchedcommands is not None:
4505 if batchedcommands is not None:
4506 raise error.Abort(_(b'unclosed "batchbegin" request'))
4506 raise error.Abort(_(b'unclosed "batchbegin" request'))
4507
4507
4508 if peer:
4508 if peer:
4509 peer.close()
4509 peer.close()
4510
4510
4511 if proc:
4511 if proc:
4512 proc.kill()
4512 proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now