debugcommands: move away from line buffered output on binary stream...
Gregory Szorc
r44582:52f8b07a default
@@ -1,4285 +1,4288 b''
# debugcommands.py - command processing for debug* commands
#
# Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import codecs
import collections
import difflib
import errno
import operator
import os
import random
import re
import socket
import ssl
import stat
import string
import subprocess
import sys
import time

from .i18n import _
from .node import (
    bin,
    hex,
    nullhex,
    nullid,
    nullrev,
    short,
)
from .pycompat import (
    getattr,
    open,
)
from . import (
    bundle2,
    changegroup,
    cmdutil,
    color,
    context,
    copies,
    dagparser,
    encoding,
    error,
    exchange,
    extensions,
    filemerge,
    filesetlang,
    formatter,
    hg,
    httppeer,
    localrepo,
    lock as lockmod,
    logcmdutil,
    merge as mergemod,
    obsolete,
    obsutil,
    pathutil,
    phases,
    policy,
    pvec,
    pycompat,
    registrar,
    repair,
    revlog,
    revset,
    revsetlang,
    scmutil,
    setdiscovery,
    simplemerge,
    sshpeer,
    sslutil,
    streamclone,
    templater,
    treediscovery,
    upgrade,
    url as urlmod,
    util,
    vfs as vfsmod,
    wireprotoframing,
    wireprotoserver,
    wireprotov2peer,
)
from .utils import (
    cborutil,
    compression,
    dateutil,
    procutil,
    stringutil,
)

from .revlogutils import deltas as deltautil

release = lockmod.release

command = registrar.command()


@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
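    # Illustrative usage (comment added for clarity, not part of the original
    # source): inside a repository, something like
    #
    #   $ hg debugancestor REV1 REV2
    #
    # resolves both revisions via the changelog and prints the greatest
    # common ancestor as "<rev>:<hex node>".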
    if len(args) == 3:
        index, rev1, rev2 = args
        r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = r.lookup
    elif len(args) == 2:
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    a = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))


@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    f = hg.openpath(ui, fname)
    gen = exchange.readbundle(ui, f, fname)
    gen.apply(repo)


@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """
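    # Illustrative example (comment added; not part of the original source),
    # built only from the elements documented above: in a fresh, empty repo
    #
    #   $ hg debugbuilddag '+2:base +3:other *base */other'
    #
    # would create a five-node linear chain tagged at "base" (rev 1) and
    # "other" (rev 4), fork a new node off "base", and finish with a merge
    # of that fork and "other".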

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1
        atbranch = b'default'
        nodeids = []
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))


def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))


def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        msg = b"%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = b"%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        fm = ui.formatter(b'debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()


def _debugphaseheads(ui, data, indent=0):
    """display version and markers contained in 'data'"""
    indent_string = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(indent_string)
            ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))


def _quasirepr(thing):
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        return b'{%s}' % (
            b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
        )
    return pycompat.bytestr(repr(thing))


def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)


@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
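    # Illustrative usage (comment added; the file name is a placeholder):
    #
    #   $ hg debugbundle --spec changesets.hg
    #   $ hg debugbundle --all changesets.hg
    #
    # the first form prints the bundlespec only, the second lists every
    # delta in the bundle with its parents, changeset and delta base.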
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            spec = exchange.getbundlespec(ui, f)
            ui.write(b'%s\n' % spec)
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)


@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
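    # Illustrative usage (comment added; the URL is a placeholder): any peer
    # path accepted by `hg pull`/`hg push` works here, e.g.
    #
    #   $ hg debugcapabilities https://example.com/repo
    #
    # which prints the main wire-protocol capabilities and, when advertised,
    # the bundle2 capabilities of that peer.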
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    caps = peer.capabilities()
    ui.writenoi18n(b'Main capabilities:\n')
    for c in sorted(caps):
        ui.write(b'  %s\n' % c)
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.writenoi18n(b'Bundle2 capabilities:\n')
        for key, values in sorted(pycompat.iteritems(b2caps)):
            ui.write(b'  %s\n' % key)
            for v in values:
                ui.write(b'    %s\n' % v)


@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in b"nr" and f not in m1:
            ui.warn(_(b"%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in b"a" and f in m1:
            ui.warn(_(b"%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in b"m" and f not in m1 and f not in m2:
            ui.warn(
                _(b"%s in state %s, but not in either manifest\n") % (f, state)
            )
            errors += 1
    for f in m1:
        state = repo.dirstate[f]
        if state not in b"nrm":
            ui.warn(_(b"%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)


@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    else:
        return _debugdisplaycolor(ui)


def _debugdisplaycolor(ui):
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for k, v in ui.configitems(b'color'):
            if k.startswith(b'color.'):
                ui._styles[k] = k[6:]
            elif k.startswith(b'terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_(b'available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(b'%s\n' % colorname, label=label)


def _debugdisplaystyle(ui):
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            # 50
            ui.write(b': ')
            ui.write(b' ' * (max(0, width - len(label))))
            ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
        ui.write(b'\n')


@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))


@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
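    # Illustrative usage (comment added): inside a repository,
    #
    #   $ hg debugdag -t -b
    #
    # emits the changelog DAG in the same element syntax that debugbuilddag
    # consumes, labelling nodes with their tags and annotating branch
    # changes.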
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = set((int(r) for r in revs))

        def events():
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
        ui.write(b"\n")


@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
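    # Illustrative usage (comment added):
    #
    #   $ hg debugdata -c 0    # raw changelog entry for revision 0
    #   $ hg debugdata -m 0    # raw manifest for revision 0
    #
    # With a FILE argument instead of -c/-m, REV addresses that file's
    # filelog.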
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(r.rawdata(r.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)


@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
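    # Illustrative usage (comment added; the date string is arbitrary):
    #
    #   $ hg debugdate '2006-02-01 13:00:30'
    #
    # prints the parsed value both as the internal "<unixtime> <tzoffset>"
    # pair and in the standard human-readable form.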
    if opts["extended"]:
        d = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        d = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % d)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
    if range:
        m = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % m(d[0]))


@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
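    # Illustrative usage (comment added), using only the keywords documented
    # above:
    #
    #   $ hg debugdeltachain -m -T '{rev} {chainid} {chainlen} {deltatype}\n'
    #
    # walks the manifest revlog and reports, per revision, which chain it
    # belongs to, how long the chain is and how the delta was computed.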
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            if e[3] == e[5]:
                deltatype = b'p1'
            elif e[3] == e[6]:
                deltatype = b'p2'
            elif e[3] == rev - 1:
                deltatype = b'prev'
            elif e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'other'
        else:
            if e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b'    rev  chain# chainlen     prev   delta       '
        b'size    rawsize  chainsize     ratio   lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b'   readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()


@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""
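    # Illustrative usage (comment added):
    #
    #   $ hg debugdirstate --datesort
    #
    # lists each tracked file with its dirstate status character, recorded
    # mode, size and mtime, sorted by mtime instead of by filename.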

    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:
        keyfunc = lambda x: (x[1][3], x[0])  # sort by mtime, then by filename
    else:
        keyfunc = None  # sort by filename
    for file_, ent in sorted(pycompat.iteritems(repo.dirstate), key=keyfunc):
        if ent[3] == -1:
            timestr = b'unset               '
        elif nodates:
            timestr = b'set                 '
        else:
            timestr = time.strftime(
                "%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
            )
            timestr = encoding.strtolocal(timestr)
        if ent[1] & 0o20000:
            mode = b'lnk'
        else:
            mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))


@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
    ]
    + cmdutil.remoteopts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation"""
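    # Illustrative usage (comment added; "default" is whatever [paths]
    # points at):
    #
    #   $ hg debugdiscovery
    #   $ hg debugdiscovery --rev . some-other-repo
    #
    # runs only the common-heads negotiation against the peer and prints
    # the timing and heads statistics computed below.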
911 opts = pycompat.byteskwargs(opts)
911 opts = pycompat.byteskwargs(opts)
912 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
912 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
913 remote = hg.peer(repo, opts, remoteurl)
913 remote = hg.peer(repo, opts, remoteurl)
914 ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))
914 ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))
915
915
916 # make sure tests are repeatable
916 # make sure tests are repeatable
917 random.seed(int(opts[b'seed']))
917 random.seed(int(opts[b'seed']))
918
918
919 if opts.get(b'old'):
919 if opts.get(b'old'):
920
920
921 def doit(pushedrevs, remoteheads, remote=remote):
921 def doit(pushedrevs, remoteheads, remote=remote):
922 if not util.safehasattr(remote, b'branches'):
922 if not util.safehasattr(remote, b'branches'):
923 # enable in-client legacy support
923 # enable in-client legacy support
924 remote = localrepo.locallegacypeer(remote.local())
924 remote = localrepo.locallegacypeer(remote.local())
925 common, _in, hds = treediscovery.findcommonincoming(
925 common, _in, hds = treediscovery.findcommonincoming(
926 repo, remote, force=True
926 repo, remote, force=True
927 )
927 )
928 common = set(common)
928 common = set(common)
929 if not opts.get(b'nonheads'):
929 if not opts.get(b'nonheads'):
930 ui.writenoi18n(
930 ui.writenoi18n(
931 b"unpruned common: %s\n"
931 b"unpruned common: %s\n"
932 % b" ".join(sorted(short(n) for n in common))
932 % b" ".join(sorted(short(n) for n in common))
933 )
933 )
934
934
935 clnode = repo.changelog.node
935 clnode = repo.changelog.node
936 common = repo.revs(b'heads(::%ln)', common)
936 common = repo.revs(b'heads(::%ln)', common)
937 common = {clnode(r) for r in common}
937 common = {clnode(r) for r in common}
938 return common, hds
938 return common, hds
939
939
940 else:
940 else:
941
941
942 def doit(pushedrevs, remoteheads, remote=remote):
942 def doit(pushedrevs, remoteheads, remote=remote):
943 nodes = None
943 nodes = None
944 if pushedrevs:
944 if pushedrevs:
945 revs = scmutil.revrange(repo, pushedrevs)
945 revs = scmutil.revrange(repo, pushedrevs)
946 nodes = [repo[r].node() for r in revs]
946 nodes = [repo[r].node() for r in revs]
947 common, any, hds = setdiscovery.findcommonheads(
947 common, any, hds = setdiscovery.findcommonheads(
948 ui, repo, remote, ancestorsof=nodes
948 ui, repo, remote, ancestorsof=nodes
949 )
949 )
950 return common, hds
950 return common, hds
951
951
952 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
952 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
953 localrevs = opts[b'rev']
953 localrevs = opts[b'rev']
954 with util.timedcm('debug-discovery') as t:
954 with util.timedcm('debug-discovery') as t:
955 common, hds = doit(localrevs, remoterevs)
955 common, hds = doit(localrevs, remoterevs)
956
956
957 # compute all statistics
957 # compute all statistics
958 common = set(common)
958 common = set(common)
959 rheads = set(hds)
959 rheads = set(hds)
960 lheads = set(repo.heads())
960 lheads = set(repo.heads())
961
961
962 data = {}
962 data = {}
963 data[b'elapsed'] = t.elapsed
963 data[b'elapsed'] = t.elapsed
964 data[b'nb-common'] = len(common)
964 data[b'nb-common'] = len(common)
965 data[b'nb-common-local'] = len(common & lheads)
965 data[b'nb-common-local'] = len(common & lheads)
966 data[b'nb-common-remote'] = len(common & rheads)
966 data[b'nb-common-remote'] = len(common & rheads)
967 data[b'nb-common-both'] = len(common & rheads & lheads)
967 data[b'nb-common-both'] = len(common & rheads & lheads)
968 data[b'nb-local'] = len(lheads)
968 data[b'nb-local'] = len(lheads)
969 data[b'nb-local-missing'] = data[b'nb-local'] - data[b'nb-common-local']
969 data[b'nb-local-missing'] = data[b'nb-local'] - data[b'nb-common-local']
970 data[b'nb-remote'] = len(rheads)
970 data[b'nb-remote'] = len(rheads)
971 data[b'nb-remote-unknown'] = data[b'nb-remote'] - data[b'nb-common-remote']
971 data[b'nb-remote-unknown'] = data[b'nb-remote'] - data[b'nb-common-remote']
972 data[b'nb-revs'] = len(repo.revs(b'all()'))
972 data[b'nb-revs'] = len(repo.revs(b'all()'))
973 data[b'nb-revs-common'] = len(repo.revs(b'::%ln', common))
973 data[b'nb-revs-common'] = len(repo.revs(b'::%ln', common))
974 data[b'nb-revs-missing'] = data[b'nb-revs'] - data[b'nb-revs-common']
974 data[b'nb-revs-missing'] = data[b'nb-revs'] - data[b'nb-revs-common']
975
975
976 # display discovery summary
976 # display discovery summary
977 ui.writenoi18n(b"elapsed time: %(elapsed)f seconds\n" % data)
977 ui.writenoi18n(b"elapsed time: %(elapsed)f seconds\n" % data)
978 ui.writenoi18n(b"heads summary:\n")
978 ui.writenoi18n(b"heads summary:\n")
979 ui.writenoi18n(b" total common heads: %(nb-common)9d\n" % data)
979 ui.writenoi18n(b" total common heads: %(nb-common)9d\n" % data)
980 ui.writenoi18n(b" also local heads: %(nb-common-local)9d\n" % data)
980 ui.writenoi18n(b" also local heads: %(nb-common-local)9d\n" % data)
981 ui.writenoi18n(b" also remote heads: %(nb-common-remote)9d\n" % data)
981 ui.writenoi18n(b" also remote heads: %(nb-common-remote)9d\n" % data)
982 ui.writenoi18n(b" both: %(nb-common-both)9d\n" % data)
982 ui.writenoi18n(b" both: %(nb-common-both)9d\n" % data)
983 ui.writenoi18n(b" local heads: %(nb-local)9d\n" % data)
983 ui.writenoi18n(b" local heads: %(nb-local)9d\n" % data)
984 ui.writenoi18n(b" common: %(nb-common-local)9d\n" % data)
984 ui.writenoi18n(b" common: %(nb-common-local)9d\n" % data)
985 ui.writenoi18n(b" missing: %(nb-local-missing)9d\n" % data)
985 ui.writenoi18n(b" missing: %(nb-local-missing)9d\n" % data)
986 ui.writenoi18n(b" remote heads: %(nb-remote)9d\n" % data)
986 ui.writenoi18n(b" remote heads: %(nb-remote)9d\n" % data)
987 ui.writenoi18n(b" common: %(nb-common-remote)9d\n" % data)
987 ui.writenoi18n(b" common: %(nb-common-remote)9d\n" % data)
988 ui.writenoi18n(b" unknown: %(nb-remote-unknown)9d\n" % data)
988 ui.writenoi18n(b" unknown: %(nb-remote-unknown)9d\n" % data)
989 ui.writenoi18n(b"local changesets: %(nb-revs)9d\n" % data)
989 ui.writenoi18n(b"local changesets: %(nb-revs)9d\n" % data)
990 ui.writenoi18n(b" common: %(nb-revs-common)9d\n" % data)
990 ui.writenoi18n(b" common: %(nb-revs-common)9d\n" % data)
991 ui.writenoi18n(b" missing: %(nb-revs-missing)9d\n" % data)
991 ui.writenoi18n(b" missing: %(nb-revs-missing)9d\n" % data)
992
992
993 if ui.verbose:
993 if ui.verbose:
994 ui.writenoi18n(
994 ui.writenoi18n(
995 b"common heads: %s\n" % b" ".join(sorted(short(n) for n in common))
995 b"common heads: %s\n" % b" ".join(sorted(short(n) for n in common))
996 )
996 )
997
997
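# Illustrative sketch (not part of the original module): the summary printed
# above is plain set arithmetic, so the derived counters always satisfy the
# identities below.  ``data`` is assumed to be shaped like the dict built in
# the command.
def _example_check_discovery_counts(data):
    """Sanity-check the derived discovery counters (illustration only)."""
    assert data[b'nb-local'] == data[b'nb-common-local'] + data[b'nb-local-missing']
    assert data[b'nb-remote'] == data[b'nb-common-remote'] + data[b'nb-remote-unknown']
    assert data[b'nb-revs'] == data[b'nb-revs-common'] + data[b'nb-revs-missing']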
998
998
999 _chunksize = 4 << 10
999 _chunksize = 4 << 10
1000
1000
1001
1001
1002 @command(
1002 @command(
1003 b'debugdownload', [(b'o', b'output', b'', _(b'path')),], optionalrepo=True
1003 b'debugdownload', [(b'o', b'output', b'', _(b'path')),], optionalrepo=True
1004 )
1004 )
1005 def debugdownload(ui, repo, url, output=None, **opts):
1005 def debugdownload(ui, repo, url, output=None, **opts):
1006 """download a resource using Mercurial logic and config
1006 """download a resource using Mercurial logic and config
1007 """
1007 """
1008 fh = urlmod.open(ui, url, output)
1008 fh = urlmod.open(ui, url, output)
1009
1009
1010 dest = ui
1010 dest = ui
1011 if output:
1011 if output:
1012 dest = open(output, b"wb", _chunksize)
1012 dest = open(output, b"wb", _chunksize)
1013 try:
1013 try:
1014 data = fh.read(_chunksize)
1014 data = fh.read(_chunksize)
1015 while data:
1015 while data:
1016 dest.write(data)
1016 dest.write(data)
1017 data = fh.read(_chunksize)
1017 data = fh.read(_chunksize)
1018 finally:
1018 finally:
1019 if output:
1019 if output:
1020 dest.close()
1020 dest.close()
1021
1021
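# Illustrative sketch (not part of the original module): the download loop in
# debugdownload above is a plain chunked copy; this hypothetical helper mirrors
# it for any pair of file-like objects.
def _example_chunked_copy(src, dst, chunksize=_chunksize):
    """Copy ``src`` to ``dst`` in ``chunksize`` blocks (illustration only)."""
    data = src.read(chunksize)
    while data:
        dst.write(data)
        data = src.read(chunksize)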
1022
1022
1023 @command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
1023 @command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
1024 def debugextensions(ui, repo, **opts):
1024 def debugextensions(ui, repo, **opts):
1025 '''show information about active extensions'''
1025 '''show information about active extensions'''
1026 opts = pycompat.byteskwargs(opts)
1026 opts = pycompat.byteskwargs(opts)
1027 exts = extensions.extensions(ui)
1027 exts = extensions.extensions(ui)
1028 hgver = util.version()
1028 hgver = util.version()
1029 fm = ui.formatter(b'debugextensions', opts)
1029 fm = ui.formatter(b'debugextensions', opts)
1030 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
1030 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
1031 isinternal = extensions.ismoduleinternal(extmod)
1031 isinternal = extensions.ismoduleinternal(extmod)
1032 extsource = None
1032 extsource = None
1033
1033
1034 if util.safehasattr(extmod, '__file__'):
1034 if util.safehasattr(extmod, '__file__'):
1035 extsource = pycompat.fsencode(extmod.__file__)
1035 extsource = pycompat.fsencode(extmod.__file__)
1036 elif getattr(sys, 'oxidized', False):
1036 elif getattr(sys, 'oxidized', False):
1037 extsource = pycompat.sysexecutable
1037 extsource = pycompat.sysexecutable
1038 if isinternal:
1038 if isinternal:
1039 exttestedwith = [] # never expose magic string to users
1039 exttestedwith = [] # never expose magic string to users
1040 else:
1040 else:
1041 exttestedwith = getattr(extmod, 'testedwith', b'').split()
1041 exttestedwith = getattr(extmod, 'testedwith', b'').split()
1042 extbuglink = getattr(extmod, 'buglink', None)
1042 extbuglink = getattr(extmod, 'buglink', None)
1043
1043
1044 fm.startitem()
1044 fm.startitem()
1045
1045
1046 if ui.quiet or ui.verbose:
1046 if ui.quiet or ui.verbose:
1047 fm.write(b'name', b'%s\n', extname)
1047 fm.write(b'name', b'%s\n', extname)
1048 else:
1048 else:
1049 fm.write(b'name', b'%s', extname)
1049 fm.write(b'name', b'%s', extname)
1050 if isinternal or hgver in exttestedwith:
1050 if isinternal or hgver in exttestedwith:
1051 fm.plain(b'\n')
1051 fm.plain(b'\n')
1052 elif not exttestedwith:
1052 elif not exttestedwith:
1053 fm.plain(_(b' (untested!)\n'))
1053 fm.plain(_(b' (untested!)\n'))
1054 else:
1054 else:
1055 lasttestedversion = exttestedwith[-1]
1055 lasttestedversion = exttestedwith[-1]
1056 fm.plain(b' (%s!)\n' % lasttestedversion)
1056 fm.plain(b' (%s!)\n' % lasttestedversion)
1057
1057
1058 fm.condwrite(
1058 fm.condwrite(
1059 ui.verbose and extsource,
1059 ui.verbose and extsource,
1060 b'source',
1060 b'source',
1061 _(b' location: %s\n'),
1061 _(b' location: %s\n'),
1062 extsource or b"",
1062 extsource or b"",
1063 )
1063 )
1064
1064
1065 if ui.verbose:
1065 if ui.verbose:
1066 fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
1066 fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
1067 fm.data(bundled=isinternal)
1067 fm.data(bundled=isinternal)
1068
1068
1069 fm.condwrite(
1069 fm.condwrite(
1070 ui.verbose and exttestedwith,
1070 ui.verbose and exttestedwith,
1071 b'testedwith',
1071 b'testedwith',
1072 _(b' tested with: %s\n'),
1072 _(b' tested with: %s\n'),
1073 fm.formatlist(exttestedwith, name=b'ver'),
1073 fm.formatlist(exttestedwith, name=b'ver'),
1074 )
1074 )
1075
1075
1076 fm.condwrite(
1076 fm.condwrite(
1077 ui.verbose and extbuglink,
1077 ui.verbose and extbuglink,
1078 b'buglink',
1078 b'buglink',
1079 _(b' bug reporting: %s\n'),
1079 _(b' bug reporting: %s\n'),
1080 extbuglink or b"",
1080 extbuglink or b"",
1081 )
1081 )
1082
1082
1083 fm.end()
1083 fm.end()
1084
1084
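# Illustrative sketch (not part of the original module): how the suffix printed
# next to an extension name is derived from ``testedwith`` in the loop above.
def _example_testedwith_suffix(isinternal, hgver, exttestedwith):
    """Return the plain-mode suffix for an extension line (illustration only)."""
    if isinternal or hgver in exttestedwith:
        return b''
    if not exttestedwith:
        return b' (untested!)'
    return b' (%s!)' % exttestedwith[-1]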
1085
1085
1086 @command(
1086 @command(
1087 b'debugfileset',
1087 b'debugfileset',
1088 [
1088 [
1089 (
1089 (
1090 b'r',
1090 b'r',
1091 b'rev',
1091 b'rev',
1092 b'',
1092 b'',
1093 _(b'apply the filespec on this revision'),
1093 _(b'apply the filespec on this revision'),
1094 _(b'REV'),
1094 _(b'REV'),
1095 ),
1095 ),
1096 (
1096 (
1097 b'',
1097 b'',
1098 b'all-files',
1098 b'all-files',
1099 False,
1099 False,
1100 _(b'test files from all revisions and working directory'),
1100 _(b'test files from all revisions and working directory'),
1101 ),
1101 ),
1102 (
1102 (
1103 b's',
1103 b's',
1104 b'show-matcher',
1104 b'show-matcher',
1105 None,
1105 None,
1106 _(b'print internal representation of matcher'),
1106 _(b'print internal representation of matcher'),
1107 ),
1107 ),
1108 (
1108 (
1109 b'p',
1109 b'p',
1110 b'show-stage',
1110 b'show-stage',
1111 [],
1111 [],
1112 _(b'print parsed tree at the given stage'),
1112 _(b'print parsed tree at the given stage'),
1113 _(b'NAME'),
1113 _(b'NAME'),
1114 ),
1114 ),
1115 ],
1115 ],
1116 _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
1116 _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
1117 )
1117 )
1118 def debugfileset(ui, repo, expr, **opts):
1118 def debugfileset(ui, repo, expr, **opts):
1119 '''parse and apply a fileset specification'''
1119 '''parse and apply a fileset specification'''
1120 from . import fileset
1120 from . import fileset
1121
1121
1122 fileset.symbols # force import of fileset so we have predicates to optimize
1122 fileset.symbols # force import of fileset so we have predicates to optimize
1123 opts = pycompat.byteskwargs(opts)
1123 opts = pycompat.byteskwargs(opts)
1124 ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)
1124 ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)
1125
1125
1126 stages = [
1126 stages = [
1127 (b'parsed', pycompat.identity),
1127 (b'parsed', pycompat.identity),
1128 (b'analyzed', filesetlang.analyze),
1128 (b'analyzed', filesetlang.analyze),
1129 (b'optimized', filesetlang.optimize),
1129 (b'optimized', filesetlang.optimize),
1130 ]
1130 ]
1131 stagenames = set(n for n, f in stages)
1131 stagenames = set(n for n, f in stages)
1132
1132
1133 showalways = set()
1133 showalways = set()
1134 if ui.verbose and not opts[b'show_stage']:
1134 if ui.verbose and not opts[b'show_stage']:
1135 # show parsed tree by --verbose (deprecated)
1135 # show parsed tree by --verbose (deprecated)
1136 showalways.add(b'parsed')
1136 showalways.add(b'parsed')
1137 if opts[b'show_stage'] == [b'all']:
1137 if opts[b'show_stage'] == [b'all']:
1138 showalways.update(stagenames)
1138 showalways.update(stagenames)
1139 else:
1139 else:
1140 for n in opts[b'show_stage']:
1140 for n in opts[b'show_stage']:
1141 if n not in stagenames:
1141 if n not in stagenames:
1142 raise error.Abort(_(b'invalid stage name: %s') % n)
1142 raise error.Abort(_(b'invalid stage name: %s') % n)
1143 showalways.update(opts[b'show_stage'])
1143 showalways.update(opts[b'show_stage'])
1144
1144
1145 tree = filesetlang.parse(expr)
1145 tree = filesetlang.parse(expr)
1146 for n, f in stages:
1146 for n, f in stages:
1147 tree = f(tree)
1147 tree = f(tree)
1148 if n in showalways:
1148 if n in showalways:
1149 if opts[b'show_stage'] or n != b'parsed':
1149 if opts[b'show_stage'] or n != b'parsed':
1150 ui.write(b"* %s:\n" % n)
1150 ui.write(b"* %s:\n" % n)
1151 ui.write(filesetlang.prettyformat(tree), b"\n")
1151 ui.write(filesetlang.prettyformat(tree), b"\n")
1152
1152
1153 files = set()
1153 files = set()
1154 if opts[b'all_files']:
1154 if opts[b'all_files']:
1155 for r in repo:
1155 for r in repo:
1156 c = repo[r]
1156 c = repo[r]
1157 files.update(c.files())
1157 files.update(c.files())
1158 files.update(c.substate)
1158 files.update(c.substate)
1159 if opts[b'all_files'] or ctx.rev() is None:
1159 if opts[b'all_files'] or ctx.rev() is None:
1160 wctx = repo[None]
1160 wctx = repo[None]
1161 files.update(
1161 files.update(
1162 repo.dirstate.walk(
1162 repo.dirstate.walk(
1163 scmutil.matchall(repo),
1163 scmutil.matchall(repo),
1164 subrepos=list(wctx.substate),
1164 subrepos=list(wctx.substate),
1165 unknown=True,
1165 unknown=True,
1166 ignored=True,
1166 ignored=True,
1167 )
1167 )
1168 )
1168 )
1169 files.update(wctx.substate)
1169 files.update(wctx.substate)
1170 else:
1170 else:
1171 files.update(ctx.files())
1171 files.update(ctx.files())
1172 files.update(ctx.substate)
1172 files.update(ctx.substate)
1173
1173
1174 m = ctx.matchfileset(repo.getcwd(), expr)
1174 m = ctx.matchfileset(repo.getcwd(), expr)
1175 if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
1175 if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
1176 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
1176 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
1177 for f in sorted(files):
1177 for f in sorted(files):
1178 if not m(f):
1178 if not m(f):
1179 continue
1179 continue
1180 ui.write(b"%s\n" % f)
1180 ui.write(b"%s\n" % f)
1181
1181
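# Illustrative sketch (not part of the original module): the staged rewrite of a
# fileset parse tree, as driven by the ``stages`` list in debugfileset above.
def _example_fileset_stages(expr):
    """Parse, analyze and optimize a fileset expression (illustration only)."""
    tree = filesetlang.parse(expr)
    tree = filesetlang.analyze(tree)
    tree = filesetlang.optimize(tree)
    return tree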
1182
1182
1183 @command(b'debugformat', [] + cmdutil.formatteropts)
1183 @command(b'debugformat', [] + cmdutil.formatteropts)
1184 def debugformat(ui, repo, **opts):
1184 def debugformat(ui, repo, **opts):
1185 """display format information about the current repository
1185 """display format information about the current repository
1186
1186
1187 Use --verbose to get extra information about current config value and
1187 Use --verbose to get extra information about current config value and
1188 Mercurial default."""
1188 Mercurial default."""
1189 opts = pycompat.byteskwargs(opts)
1189 opts = pycompat.byteskwargs(opts)
1190 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1190 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1191 maxvariantlength = max(len(b'format-variant'), maxvariantlength)
1191 maxvariantlength = max(len(b'format-variant'), maxvariantlength)
1192
1192
1193 def makeformatname(name):
1193 def makeformatname(name):
1194 return b'%s:' + (b' ' * (maxvariantlength - len(name)))
1194 return b'%s:' + (b' ' * (maxvariantlength - len(name)))
1195
1195
1196 fm = ui.formatter(b'debugformat', opts)
1196 fm = ui.formatter(b'debugformat', opts)
1197 if fm.isplain():
1197 if fm.isplain():
1198
1198
1199 def formatvalue(value):
1199 def formatvalue(value):
1200 if util.safehasattr(value, b'startswith'):
1200 if util.safehasattr(value, b'startswith'):
1201 return value
1201 return value
1202 if value:
1202 if value:
1203 return b'yes'
1203 return b'yes'
1204 else:
1204 else:
1205 return b'no'
1205 return b'no'
1206
1206
1207 else:
1207 else:
1208 formatvalue = pycompat.identity
1208 formatvalue = pycompat.identity
1209
1209
1210 fm.plain(b'format-variant')
1210 fm.plain(b'format-variant')
1211 fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
1211 fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
1212 fm.plain(b' repo')
1212 fm.plain(b' repo')
1213 if ui.verbose:
1213 if ui.verbose:
1214 fm.plain(b' config default')
1214 fm.plain(b' config default')
1215 fm.plain(b'\n')
1215 fm.plain(b'\n')
1216 for fv in upgrade.allformatvariant:
1216 for fv in upgrade.allformatvariant:
1217 fm.startitem()
1217 fm.startitem()
1218 repovalue = fv.fromrepo(repo)
1218 repovalue = fv.fromrepo(repo)
1219 configvalue = fv.fromconfig(repo)
1219 configvalue = fv.fromconfig(repo)
1220
1220
1221 if repovalue != configvalue:
1221 if repovalue != configvalue:
1222 namelabel = b'formatvariant.name.mismatchconfig'
1222 namelabel = b'formatvariant.name.mismatchconfig'
1223 repolabel = b'formatvariant.repo.mismatchconfig'
1223 repolabel = b'formatvariant.repo.mismatchconfig'
1224 elif repovalue != fv.default:
1224 elif repovalue != fv.default:
1225 namelabel = b'formatvariant.name.mismatchdefault'
1225 namelabel = b'formatvariant.name.mismatchdefault'
1226 repolabel = b'formatvariant.repo.mismatchdefault'
1226 repolabel = b'formatvariant.repo.mismatchdefault'
1227 else:
1227 else:
1228 namelabel = b'formatvariant.name.uptodate'
1228 namelabel = b'formatvariant.name.uptodate'
1229 repolabel = b'formatvariant.repo.uptodate'
1229 repolabel = b'formatvariant.repo.uptodate'
1230
1230
1231 fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
1231 fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
1232 fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
1232 fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
1233 if fv.default != configvalue:
1233 if fv.default != configvalue:
1234 configlabel = b'formatvariant.config.special'
1234 configlabel = b'formatvariant.config.special'
1235 else:
1235 else:
1236 configlabel = b'formatvariant.config.default'
1236 configlabel = b'formatvariant.config.default'
1237 fm.condwrite(
1237 fm.condwrite(
1238 ui.verbose,
1238 ui.verbose,
1239 b'config',
1239 b'config',
1240 b' %6s',
1240 b' %6s',
1241 formatvalue(configvalue),
1241 formatvalue(configvalue),
1242 label=configlabel,
1242 label=configlabel,
1243 )
1243 )
1244 fm.condwrite(
1244 fm.condwrite(
1245 ui.verbose,
1245 ui.verbose,
1246 b'default',
1246 b'default',
1247 b' %7s',
1247 b' %7s',
1248 formatvalue(fv.default),
1248 formatvalue(fv.default),
1249 label=b'formatvariant.default',
1249 label=b'formatvariant.default',
1250 )
1250 )
1251 fm.plain(b'\n')
1251 fm.plain(b'\n')
1252 fm.end()
1252 fm.end()
1253
1253
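# Illustrative sketch (not part of the original module): the highlight label for
# a format-variant row above depends only on how the repo value, the configured
# value and the Mercurial default compare.
def _example_variant_state(repovalue, configvalue, default):
    """Classify a format variant the way debugformat labels it (illustration only)."""
    if repovalue != configvalue:
        return b'mismatchconfig'
    if repovalue != default:
        return b'mismatchdefault'
    return b'uptodate'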
1254
1254
1255 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1255 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1256 def debugfsinfo(ui, path=b"."):
1256 def debugfsinfo(ui, path=b"."):
1257 """show information detected about current filesystem"""
1257 """show information detected about current filesystem"""
1258 ui.writenoi18n(b'path: %s\n' % path)
1258 ui.writenoi18n(b'path: %s\n' % path)
1259 ui.writenoi18n(
1259 ui.writenoi18n(
1260 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1260 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1261 )
1261 )
1262 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1262 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1263 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1263 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1264 ui.writenoi18n(
1264 ui.writenoi18n(
1265 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1265 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1266 )
1266 )
1267 ui.writenoi18n(
1267 ui.writenoi18n(
1268 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1268 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1269 )
1269 )
1270 casesensitive = b'(unknown)'
1270 casesensitive = b'(unknown)'
1271 try:
1271 try:
1272 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1272 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1273 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1273 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1274 except OSError:
1274 except OSError:
1275 pass
1275 pass
1276 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1276 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1277
1277
1278
1278
1279 @command(
1279 @command(
1280 b'debuggetbundle',
1280 b'debuggetbundle',
1281 [
1281 [
1282 (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
1282 (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
1283 (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
1283 (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
1284 (
1284 (
1285 b't',
1285 b't',
1286 b'type',
1286 b'type',
1287 b'bzip2',
1287 b'bzip2',
1288 _(b'bundle compression type to use'),
1288 _(b'bundle compression type to use'),
1289 _(b'TYPE'),
1289 _(b'TYPE'),
1290 ),
1290 ),
1291 ],
1291 ],
1292 _(b'REPO FILE [-H|-C ID]...'),
1292 _(b'REPO FILE [-H|-C ID]...'),
1293 norepo=True,
1293 norepo=True,
1294 )
1294 )
1295 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1295 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1296 """retrieves a bundle from a repo
1296 """retrieves a bundle from a repo
1297
1297
1298 Every ID must be a full-length hex node id string. Saves the bundle to the
1298 Every ID must be a full-length hex node id string. Saves the bundle to the
1299 given file.
1299 given file.
1300 """
1300 """
1301 opts = pycompat.byteskwargs(opts)
1301 opts = pycompat.byteskwargs(opts)
1302 repo = hg.peer(ui, opts, repopath)
1302 repo = hg.peer(ui, opts, repopath)
1303 if not repo.capable(b'getbundle'):
1303 if not repo.capable(b'getbundle'):
1304 raise error.Abort(b"getbundle() not supported by target repository")
1304 raise error.Abort(b"getbundle() not supported by target repository")
1305 args = {}
1305 args = {}
1306 if common:
1306 if common:
1307 args['common'] = [bin(s) for s in common]
1307 args['common'] = [bin(s) for s in common]
1308 if head:
1308 if head:
1309 args['heads'] = [bin(s) for s in head]
1309 args['heads'] = [bin(s) for s in head]
1310 # TODO: get desired bundlecaps from command line.
1310 # TODO: get desired bundlecaps from command line.
1311 args['bundlecaps'] = None
1311 args['bundlecaps'] = None
1312 bundle = repo.getbundle(b'debug', **args)
1312 bundle = repo.getbundle(b'debug', **args)
1313
1313
1314 bundletype = opts.get(b'type', b'bzip2').lower()
1314 bundletype = opts.get(b'type', b'bzip2').lower()
1315 btypes = {
1315 btypes = {
1316 b'none': b'HG10UN',
1316 b'none': b'HG10UN',
1317 b'bzip2': b'HG10BZ',
1317 b'bzip2': b'HG10BZ',
1318 b'gzip': b'HG10GZ',
1318 b'gzip': b'HG10GZ',
1319 b'bundle2': b'HG20',
1319 b'bundle2': b'HG20',
1320 }
1320 }
1321 bundletype = btypes.get(bundletype)
1321 bundletype = btypes.get(bundletype)
1322 if bundletype not in bundle2.bundletypes:
1322 if bundletype not in bundle2.bundletypes:
1323 raise error.Abort(_(b'unknown bundle type specified with --type'))
1323 raise error.Abort(_(b'unknown bundle type specified with --type'))
1324 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1324 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1325
1325
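# Illustrative sketch (not part of the original module): the --type argument of
# debuggetbundle above is just a friendly alias for an on-disk bundle header.
def _example_bundletype(name):
    """Map a --type value to its bundle header, or None (illustration only)."""
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    return btypes.get(name.lower())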
1326
1326
1327 @command(b'debugignore', [], b'[FILE]')
1327 @command(b'debugignore', [], b'[FILE]')
1328 def debugignore(ui, repo, *files, **opts):
1328 def debugignore(ui, repo, *files, **opts):
1329 """display the combined ignore pattern and information about ignored files
1329 """display the combined ignore pattern and information about ignored files
1330
1330
1331 With no argument display the combined ignore pattern.
1331 With no argument display the combined ignore pattern.
1332
1332
1333 Given space separated file names, show if the given file is ignored and
1333 Given space separated file names, show if the given file is ignored and
1334 if so, show the ignore rule (file and line number) that matched it.
1334 if so, show the ignore rule (file and line number) that matched it.
1335 """
1335 """
1336 ignore = repo.dirstate._ignore
1336 ignore = repo.dirstate._ignore
1337 if not files:
1337 if not files:
1338 # Show all the patterns
1338 # Show all the patterns
1339 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1339 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1340 else:
1340 else:
1341 m = scmutil.match(repo[None], pats=files)
1341 m = scmutil.match(repo[None], pats=files)
1342 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1342 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1343 for f in m.files():
1343 for f in m.files():
1344 nf = util.normpath(f)
1344 nf = util.normpath(f)
1345 ignored = None
1345 ignored = None
1346 ignoredata = None
1346 ignoredata = None
1347 if nf != b'.':
1347 if nf != b'.':
1348 if ignore(nf):
1348 if ignore(nf):
1349 ignored = nf
1349 ignored = nf
1350 ignoredata = repo.dirstate._ignorefileandline(nf)
1350 ignoredata = repo.dirstate._ignorefileandline(nf)
1351 else:
1351 else:
1352 for p in pathutil.finddirs(nf):
1352 for p in pathutil.finddirs(nf):
1353 if ignore(p):
1353 if ignore(p):
1354 ignored = p
1354 ignored = p
1355 ignoredata = repo.dirstate._ignorefileandline(p)
1355 ignoredata = repo.dirstate._ignorefileandline(p)
1356 break
1356 break
1357 if ignored:
1357 if ignored:
1358 if ignored == nf:
1358 if ignored == nf:
1359 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1359 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1360 else:
1360 else:
1361 ui.write(
1361 ui.write(
1362 _(
1362 _(
1363 b"%s is ignored because of "
1363 b"%s is ignored because of "
1364 b"containing directory %s\n"
1364 b"containing directory %s\n"
1365 )
1365 )
1366 % (uipathfn(f), ignored)
1366 % (uipathfn(f), ignored)
1367 )
1367 )
1368 ignorefile, lineno, line = ignoredata
1368 ignorefile, lineno, line = ignoredata
1369 ui.write(
1369 ui.write(
1370 _(b"(ignore rule in %s, line %d: '%s')\n")
1370 _(b"(ignore rule in %s, line %d: '%s')\n")
1371 % (ignorefile, lineno, line)
1371 % (ignorefile, lineno, line)
1372 )
1372 )
1373 else:
1373 else:
1374 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1374 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1375
1375
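# Illustrative sketch (not part of the original module): the lookup in
# debugignore above first tests the file itself and then every containing
# directory, mirroring how dirstate ignore matching cascades.
def _example_find_ignored(ignore, nf):
    """Return the ignored path covering ``nf``, or None (illustration only)."""
    if ignore(nf):
        return nf
    for p in pathutil.finddirs(nf):
        if ignore(p):
            return p
    return None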
1376
1376
1377 @command(
1377 @command(
1378 b'debugindex',
1378 b'debugindex',
1379 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1379 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1380 _(b'-c|-m|FILE'),
1380 _(b'-c|-m|FILE'),
1381 )
1381 )
1382 def debugindex(ui, repo, file_=None, **opts):
1382 def debugindex(ui, repo, file_=None, **opts):
1383 """dump index data for a storage primitive"""
1383 """dump index data for a storage primitive"""
1384 opts = pycompat.byteskwargs(opts)
1384 opts = pycompat.byteskwargs(opts)
1385 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1385 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1386
1386
1387 if ui.debugflag:
1387 if ui.debugflag:
1388 shortfn = hex
1388 shortfn = hex
1389 else:
1389 else:
1390 shortfn = short
1390 shortfn = short
1391
1391
1392 idlen = 12
1392 idlen = 12
1393 for i in store:
1393 for i in store:
1394 idlen = len(shortfn(store.node(i)))
1394 idlen = len(shortfn(store.node(i)))
1395 break
1395 break
1396
1396
1397 fm = ui.formatter(b'debugindex', opts)
1397 fm = ui.formatter(b'debugindex', opts)
1398 fm.plain(
1398 fm.plain(
1399 b' rev linkrev %s %s p2\n'
1399 b' rev linkrev %s %s p2\n'
1400 % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
1400 % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
1401 )
1401 )
1402
1402
1403 for rev in store:
1403 for rev in store:
1404 node = store.node(rev)
1404 node = store.node(rev)
1405 parents = store.parents(node)
1405 parents = store.parents(node)
1406
1406
1407 fm.startitem()
1407 fm.startitem()
1408 fm.write(b'rev', b'%6d ', rev)
1408 fm.write(b'rev', b'%6d ', rev)
1409 fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
1409 fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
1410 fm.write(b'node', b'%s ', shortfn(node))
1410 fm.write(b'node', b'%s ', shortfn(node))
1411 fm.write(b'p1', b'%s ', shortfn(parents[0]))
1411 fm.write(b'p1', b'%s ', shortfn(parents[0]))
1412 fm.write(b'p2', b'%s', shortfn(parents[1]))
1412 fm.write(b'p2', b'%s', shortfn(parents[1]))
1413 fm.plain(b'\n')
1413 fm.plain(b'\n')
1414
1414
1415 fm.end()
1415 fm.end()
1416
1416
1417
1417
1418 @command(
1418 @command(
1419 b'debugindexdot',
1419 b'debugindexdot',
1420 cmdutil.debugrevlogopts,
1420 cmdutil.debugrevlogopts,
1421 _(b'-c|-m|FILE'),
1421 _(b'-c|-m|FILE'),
1422 optionalrepo=True,
1422 optionalrepo=True,
1423 )
1423 )
1424 def debugindexdot(ui, repo, file_=None, **opts):
1424 def debugindexdot(ui, repo, file_=None, **opts):
1425 """dump an index DAG as a graphviz dot file"""
1425 """dump an index DAG as a graphviz dot file"""
1426 opts = pycompat.byteskwargs(opts)
1426 opts = pycompat.byteskwargs(opts)
1427 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1427 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1428 ui.writenoi18n(b"digraph G {\n")
1428 ui.writenoi18n(b"digraph G {\n")
1429 for i in r:
1429 for i in r:
1430 node = r.node(i)
1430 node = r.node(i)
1431 pp = r.parents(node)
1431 pp = r.parents(node)
1432 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1432 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1433 if pp[1] != nullid:
1433 if pp[1] != nullid:
1434 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1434 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1435 ui.write(b"}\n")
1435 ui.write(b"}\n")
1436
1436
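# Usage note (assumption, not from the original module): the dot output written
# above can be rendered with Graphviz if it is installed, for example:
#
#   $ hg debugindexdot -c | dot -Tpng > dag.png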
1437
1437
1438 @command(b'debugindexstats', [])
1438 @command(b'debugindexstats', [])
1439 def debugindexstats(ui, repo):
1439 def debugindexstats(ui, repo):
1440 """show stats related to the changelog index"""
1440 """show stats related to the changelog index"""
1441 repo.changelog.shortest(nullid, 1)
1441 repo.changelog.shortest(nullid, 1)
1442 index = repo.changelog.index
1442 index = repo.changelog.index
1443 if not util.safehasattr(index, b'stats'):
1443 if not util.safehasattr(index, b'stats'):
1444 raise error.Abort(_(b'debugindexstats only works with native code'))
1444 raise error.Abort(_(b'debugindexstats only works with native code'))
1445 for k, v in sorted(index.stats().items()):
1445 for k, v in sorted(index.stats().items()):
1446 ui.write(b'%s: %d\n' % (k, v))
1446 ui.write(b'%s: %d\n' % (k, v))
1447
1447
1448
1448
1449 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1449 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1450 def debuginstall(ui, **opts):
1450 def debuginstall(ui, **opts):
1451 '''test Mercurial installation
1451 '''test Mercurial installation
1452
1452
1453 Returns 0 on success.
1453 Returns 0 on success.
1454 '''
1454 '''
1455 opts = pycompat.byteskwargs(opts)
1455 opts = pycompat.byteskwargs(opts)
1456
1456
1457 problems = 0
1457 problems = 0
1458
1458
1459 fm = ui.formatter(b'debuginstall', opts)
1459 fm = ui.formatter(b'debuginstall', opts)
1460 fm.startitem()
1460 fm.startitem()
1461
1461
1462 # encoding
1462 # encoding
1463 fm.write(b'encoding', _(b"checking encoding (%s)...\n"), encoding.encoding)
1463 fm.write(b'encoding', _(b"checking encoding (%s)...\n"), encoding.encoding)
1464 err = None
1464 err = None
1465 try:
1465 try:
1466 codecs.lookup(pycompat.sysstr(encoding.encoding))
1466 codecs.lookup(pycompat.sysstr(encoding.encoding))
1467 except LookupError as inst:
1467 except LookupError as inst:
1468 err = stringutil.forcebytestr(inst)
1468 err = stringutil.forcebytestr(inst)
1469 problems += 1
1469 problems += 1
1470 fm.condwrite(
1470 fm.condwrite(
1471 err,
1471 err,
1472 b'encodingerror',
1472 b'encodingerror',
1473 _(b" %s\n (check that your locale is properly set)\n"),
1473 _(b" %s\n (check that your locale is properly set)\n"),
1474 err,
1474 err,
1475 )
1475 )
1476
1476
1477 # Python
1477 # Python
1478 pythonlib = None
1478 pythonlib = None
1479 if util.safehasattr(os, '__file__'):
1479 if util.safehasattr(os, '__file__'):
1480 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1480 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1481 elif getattr(sys, 'oxidized', False):
1481 elif getattr(sys, 'oxidized', False):
1482 pythonlib = pycompat.sysexecutable
1482 pythonlib = pycompat.sysexecutable
1483
1483
1484 fm.write(
1484 fm.write(
1485 b'pythonexe',
1485 b'pythonexe',
1486 _(b"checking Python executable (%s)\n"),
1486 _(b"checking Python executable (%s)\n"),
1487 pycompat.sysexecutable or _(b"unknown"),
1487 pycompat.sysexecutable or _(b"unknown"),
1488 )
1488 )
1489 fm.write(
1489 fm.write(
1490 b'pythonver',
1490 b'pythonver',
1491 _(b"checking Python version (%s)\n"),
1491 _(b"checking Python version (%s)\n"),
1492 (b"%d.%d.%d" % sys.version_info[:3]),
1492 (b"%d.%d.%d" % sys.version_info[:3]),
1493 )
1493 )
1494 fm.write(
1494 fm.write(
1495 b'pythonlib',
1495 b'pythonlib',
1496 _(b"checking Python lib (%s)...\n"),
1496 _(b"checking Python lib (%s)...\n"),
1497 pythonlib or _(b"unknown"),
1497 pythonlib or _(b"unknown"),
1498 )
1498 )
1499
1499
1500 security = set(sslutil.supportedprotocols)
1500 security = set(sslutil.supportedprotocols)
1501 if sslutil.hassni:
1501 if sslutil.hassni:
1502 security.add(b'sni')
1502 security.add(b'sni')
1503
1503
1504 fm.write(
1504 fm.write(
1505 b'pythonsecurity',
1505 b'pythonsecurity',
1506 _(b"checking Python security support (%s)\n"),
1506 _(b"checking Python security support (%s)\n"),
1507 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1507 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1508 )
1508 )
1509
1509
1510 # These are warnings, not errors. So don't increment problem count. This
1510 # These are warnings, not errors. So don't increment problem count. This
1511 # may change in the future.
1511 # may change in the future.
1512 if b'tls1.2' not in security:
1512 if b'tls1.2' not in security:
1513 fm.plain(
1513 fm.plain(
1514 _(
1514 _(
1515 b' TLS 1.2 not supported by Python install; '
1515 b' TLS 1.2 not supported by Python install; '
1516 b'network connections lack modern security\n'
1516 b'network connections lack modern security\n'
1517 )
1517 )
1518 )
1518 )
1519 if b'sni' not in security:
1519 if b'sni' not in security:
1520 fm.plain(
1520 fm.plain(
1521 _(
1521 _(
1522 b' SNI not supported by Python install; may have '
1522 b' SNI not supported by Python install; may have '
1523 b'connectivity issues with some servers\n'
1523 b'connectivity issues with some servers\n'
1524 )
1524 )
1525 )
1525 )
1526
1526
1527 # TODO print CA cert info
1527 # TODO print CA cert info
1528
1528
1529 # hg version
1529 # hg version
1530 hgver = util.version()
1530 hgver = util.version()
1531 fm.write(
1531 fm.write(
1532 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1532 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1533 )
1533 )
1534 fm.write(
1534 fm.write(
1535 b'hgverextra',
1535 b'hgverextra',
1536 _(b"checking Mercurial custom build (%s)\n"),
1536 _(b"checking Mercurial custom build (%s)\n"),
1537 b'+'.join(hgver.split(b'+')[1:]),
1537 b'+'.join(hgver.split(b'+')[1:]),
1538 )
1538 )
1539
1539
1540 # compiled modules
1540 # compiled modules
1541 hgmodules = None
1541 hgmodules = None
1542 if util.safehasattr(sys.modules[__name__], '__file__'):
1542 if util.safehasattr(sys.modules[__name__], '__file__'):
1543 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1543 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1544 elif getattr(sys, 'oxidized', False):
1544 elif getattr(sys, 'oxidized', False):
1545 hgmodules = pycompat.sysexecutable
1545 hgmodules = pycompat.sysexecutable
1546
1546
1547 fm.write(
1547 fm.write(
1548 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1548 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1549 )
1549 )
1550 fm.write(
1550 fm.write(
1551 b'hgmodules',
1551 b'hgmodules',
1552 _(b"checking installed modules (%s)...\n"),
1552 _(b"checking installed modules (%s)...\n"),
1553 hgmodules or _(b"unknown"),
1553 hgmodules or _(b"unknown"),
1554 )
1554 )
1555
1555
1556 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1556 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1557 rustext = rustandc # for now, that's the only case
1557 rustext = rustandc # for now, that's the only case
1558 cext = policy.policy in (b'c', b'allow') or rustandc
1558 cext = policy.policy in (b'c', b'allow') or rustandc
1559 nopure = cext or rustext
1559 nopure = cext or rustext
1560 if nopure:
1560 if nopure:
1561 err = None
1561 err = None
1562 try:
1562 try:
1563 if cext:
1563 if cext:
1564 from .cext import ( # pytype: disable=import-error
1564 from .cext import ( # pytype: disable=import-error
1565 base85,
1565 base85,
1566 bdiff,
1566 bdiff,
1567 mpatch,
1567 mpatch,
1568 osutil,
1568 osutil,
1569 )
1569 )
1570
1570
1571 # quiet pyflakes
1571 # quiet pyflakes
1572 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1572 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1573 if rustext:
1573 if rustext:
1574 from .rustext import ( # pytype: disable=import-error
1574 from .rustext import ( # pytype: disable=import-error
1575 ancestor,
1575 ancestor,
1576 dirstate,
1576 dirstate,
1577 )
1577 )
1578
1578
1579 dir(ancestor), dir(dirstate) # quiet pyflakes
1579 dir(ancestor), dir(dirstate) # quiet pyflakes
1580 except Exception as inst:
1580 except Exception as inst:
1581 err = stringutil.forcebytestr(inst)
1581 err = stringutil.forcebytestr(inst)
1582 problems += 1
1582 problems += 1
1583 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1583 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1584
1584
1585 compengines = util.compengines._engines.values()
1585 compengines = util.compengines._engines.values()
1586 fm.write(
1586 fm.write(
1587 b'compengines',
1587 b'compengines',
1588 _(b'checking registered compression engines (%s)\n'),
1588 _(b'checking registered compression engines (%s)\n'),
1589 fm.formatlist(
1589 fm.formatlist(
1590 sorted(e.name() for e in compengines),
1590 sorted(e.name() for e in compengines),
1591 name=b'compengine',
1591 name=b'compengine',
1592 fmt=b'%s',
1592 fmt=b'%s',
1593 sep=b', ',
1593 sep=b', ',
1594 ),
1594 ),
1595 )
1595 )
1596 fm.write(
1596 fm.write(
1597 b'compenginesavail',
1597 b'compenginesavail',
1598 _(b'checking available compression engines (%s)\n'),
1598 _(b'checking available compression engines (%s)\n'),
1599 fm.formatlist(
1599 fm.formatlist(
1600 sorted(e.name() for e in compengines if e.available()),
1600 sorted(e.name() for e in compengines if e.available()),
1601 name=b'compengine',
1601 name=b'compengine',
1602 fmt=b'%s',
1602 fmt=b'%s',
1603 sep=b', ',
1603 sep=b', ',
1604 ),
1604 ),
1605 )
1605 )
1606 wirecompengines = compression.compengines.supportedwireengines(
1606 wirecompengines = compression.compengines.supportedwireengines(
1607 compression.SERVERROLE
1607 compression.SERVERROLE
1608 )
1608 )
1609 fm.write(
1609 fm.write(
1610 b'compenginesserver',
1610 b'compenginesserver',
1611 _(
1611 _(
1612 b'checking available compression engines '
1612 b'checking available compression engines '
1613 b'for wire protocol (%s)\n'
1613 b'for wire protocol (%s)\n'
1614 ),
1614 ),
1615 fm.formatlist(
1615 fm.formatlist(
1616 [e.name() for e in wirecompengines if e.wireprotosupport()],
1616 [e.name() for e in wirecompengines if e.wireprotosupport()],
1617 name=b'compengine',
1617 name=b'compengine',
1618 fmt=b'%s',
1618 fmt=b'%s',
1619 sep=b', ',
1619 sep=b', ',
1620 ),
1620 ),
1621 )
1621 )
1622 re2 = b'missing'
1622 re2 = b'missing'
1623 if util._re2:
1623 if util._re2:
1624 re2 = b'available'
1624 re2 = b'available'
1625 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1625 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1626 fm.data(re2=bool(util._re2))
1626 fm.data(re2=bool(util._re2))
1627
1627
1628 # templates
1628 # templates
1629 p = templater.templatepaths()
1629 p = templater.templatepaths()
1630 fm.write(b'templatedirs', b'checking templates (%s)...\n', b' '.join(p))
1630 fm.write(b'templatedirs', b'checking templates (%s)...\n', b' '.join(p))
1631 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1631 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1632 if p:
1632 if p:
1633 m = templater.templatepath(b"map-cmdline.default")
1633 m = templater.templatepath(b"map-cmdline.default")
1634 if m:
1634 if m:
1635 # template found, check if it is working
1635 # template found, check if it is working
1636 err = None
1636 err = None
1637 try:
1637 try:
1638 templater.templater.frommapfile(m)
1638 templater.templater.frommapfile(m)
1639 except Exception as inst:
1639 except Exception as inst:
1640 err = stringutil.forcebytestr(inst)
1640 err = stringutil.forcebytestr(inst)
1641 p = None
1641 p = None
1642 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1642 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1643 else:
1643 else:
1644 p = None
1644 p = None
1645 fm.condwrite(
1645 fm.condwrite(
1646 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1646 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1647 )
1647 )
1648 fm.condwrite(
1648 fm.condwrite(
1649 not m,
1649 not m,
1650 b'defaulttemplatenotfound',
1650 b'defaulttemplatenotfound',
1651 _(b" template '%s' not found\n"),
1651 _(b" template '%s' not found\n"),
1652 b"default",
1652 b"default",
1653 )
1653 )
1654 if not p:
1654 if not p:
1655 problems += 1
1655 problems += 1
1656 fm.condwrite(
1656 fm.condwrite(
1657 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
1657 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
1658 )
1658 )
1659
1659
1660 # editor
1660 # editor
1661 editor = ui.geteditor()
1661 editor = ui.geteditor()
1662 editor = util.expandpath(editor)
1662 editor = util.expandpath(editor)
1663 editorbin = procutil.shellsplit(editor)[0]
1663 editorbin = procutil.shellsplit(editor)[0]
1664 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
1664 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
1665 cmdpath = procutil.findexe(editorbin)
1665 cmdpath = procutil.findexe(editorbin)
1666 fm.condwrite(
1666 fm.condwrite(
1667 not cmdpath and editor == b'vi',
1667 not cmdpath and editor == b'vi',
1668 b'vinotfound',
1668 b'vinotfound',
1669 _(
1669 _(
1670 b" No commit editor set and can't find %s in PATH\n"
1670 b" No commit editor set and can't find %s in PATH\n"
1671 b" (specify a commit editor in your configuration"
1671 b" (specify a commit editor in your configuration"
1672 b" file)\n"
1672 b" file)\n"
1673 ),
1673 ),
1674 not cmdpath and editor == b'vi' and editorbin,
1674 not cmdpath and editor == b'vi' and editorbin,
1675 )
1675 )
1676 fm.condwrite(
1676 fm.condwrite(
1677 not cmdpath and editor != b'vi',
1677 not cmdpath and editor != b'vi',
1678 b'editornotfound',
1678 b'editornotfound',
1679 _(
1679 _(
1680 b" Can't find editor '%s' in PATH\n"
1680 b" Can't find editor '%s' in PATH\n"
1681 b" (specify a commit editor in your configuration"
1681 b" (specify a commit editor in your configuration"
1682 b" file)\n"
1682 b" file)\n"
1683 ),
1683 ),
1684 not cmdpath and editorbin,
1684 not cmdpath and editorbin,
1685 )
1685 )
1686 if not cmdpath and editor != b'vi':
1686 if not cmdpath and editor != b'vi':
1687 problems += 1
1687 problems += 1
1688
1688
1689 # check username
1689 # check username
1690 username = None
1690 username = None
1691 err = None
1691 err = None
1692 try:
1692 try:
1693 username = ui.username()
1693 username = ui.username()
1694 except error.Abort as e:
1694 except error.Abort as e:
1695 err = stringutil.forcebytestr(e)
1695 err = stringutil.forcebytestr(e)
1696 problems += 1
1696 problems += 1
1697
1697
1698 fm.condwrite(
1698 fm.condwrite(
1699 username, b'username', _(b"checking username (%s)\n"), username
1699 username, b'username', _(b"checking username (%s)\n"), username
1700 )
1700 )
1701 fm.condwrite(
1701 fm.condwrite(
1702 err,
1702 err,
1703 b'usernameerror',
1703 b'usernameerror',
1704 _(
1704 _(
1705 b"checking username...\n %s\n"
1705 b"checking username...\n %s\n"
1706 b" (specify a username in your configuration file)\n"
1706 b" (specify a username in your configuration file)\n"
1707 ),
1707 ),
1708 err,
1708 err,
1709 )
1709 )
1710
1710
1711 for name, mod in extensions.extensions():
1711 for name, mod in extensions.extensions():
1712 handler = getattr(mod, 'debuginstall', None)
1712 handler = getattr(mod, 'debuginstall', None)
1713 if handler is not None:
1713 if handler is not None:
1714 problems += handler(ui, fm)
1714 problems += handler(ui, fm)
1715
1715
1716 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
1716 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
1717 if not problems:
1717 if not problems:
1718 fm.data(problems=problems)
1718 fm.data(problems=problems)
1719 fm.condwrite(
1719 fm.condwrite(
1720 problems,
1720 problems,
1721 b'problems',
1721 b'problems',
1722 _(b"%d problems detected, please check your install!\n"),
1722 _(b"%d problems detected, please check your install!\n"),
1723 problems,
1723 problems,
1724 )
1724 )
1725 fm.end()
1725 fm.end()
1726
1726
1727 return problems
1727 return problems
1728
1728
1729
1729
1730 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
1730 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
1731 def debugknown(ui, repopath, *ids, **opts):
1731 def debugknown(ui, repopath, *ids, **opts):
1732 """test whether node ids are known to a repo
1732 """test whether node ids are known to a repo
1733
1733
1734 Every ID must be a full-length hex node id string. Returns a list of 0s
1734 Every ID must be a full-length hex node id string. Returns a list of 0s
1735 and 1s indicating unknown/known.
1735 and 1s indicating unknown/known.
1736 """
1736 """
1737 opts = pycompat.byteskwargs(opts)
1737 opts = pycompat.byteskwargs(opts)
1738 repo = hg.peer(ui, opts, repopath)
1738 repo = hg.peer(ui, opts, repopath)
1739 if not repo.capable(b'known'):
1739 if not repo.capable(b'known'):
1740 raise error.Abort(b"known() not supported by target repository")
1740 raise error.Abort(b"known() not supported by target repository")
1741 flags = repo.known([bin(s) for s in ids])
1741 flags = repo.known([bin(s) for s in ids])
1742 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
1742 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
1743
1743
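# Illustrative sketch (not part of the original module): the booleans returned
# by peer.known() are rendered as a compact 0/1 string, e.g. [True, False, True]
# becomes b"101".
def _example_render_known(flags):
    """Format known() results the way debugknown prints them (illustration only)."""
    return b"".join([f and b"1" or b"0" for f in flags])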
1744
1744
1745 @command(b'debuglabelcomplete', [], _(b'LABEL...'))
1745 @command(b'debuglabelcomplete', [], _(b'LABEL...'))
1746 def debuglabelcomplete(ui, repo, *args):
1746 def debuglabelcomplete(ui, repo, *args):
1747 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1747 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1748 debugnamecomplete(ui, repo, *args)
1748 debugnamecomplete(ui, repo, *args)
1749
1749
1750
1750
1751 @command(
1751 @command(
1752 b'debuglocks',
1752 b'debuglocks',
1753 [
1753 [
1754 (b'L', b'force-lock', None, _(b'free the store lock (DANGEROUS)')),
1754 (b'L', b'force-lock', None, _(b'free the store lock (DANGEROUS)')),
1755 (
1755 (
1756 b'W',
1756 b'W',
1757 b'force-wlock',
1757 b'force-wlock',
1758 None,
1758 None,
1759 _(b'free the working state lock (DANGEROUS)'),
1759 _(b'free the working state lock (DANGEROUS)'),
1760 ),
1760 ),
1761 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
1761 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
1762 (
1762 (
1763 b'S',
1763 b'S',
1764 b'set-wlock',
1764 b'set-wlock',
1765 None,
1765 None,
1766 _(b'set the working state lock until stopped'),
1766 _(b'set the working state lock until stopped'),
1767 ),
1767 ),
1768 ],
1768 ],
1769 _(b'[OPTION]...'),
1769 _(b'[OPTION]...'),
1770 )
1770 )
1771 def debuglocks(ui, repo, **opts):
1771 def debuglocks(ui, repo, **opts):
1772 """show or modify state of locks
1772 """show or modify state of locks
1773
1773
1774 By default, this command will show which locks are held. This
1774 By default, this command will show which locks are held. This
1775 includes the user and process holding the lock, the amount of time
1775 includes the user and process holding the lock, the amount of time
1776 the lock has been held, and the machine name where the process is
1776 the lock has been held, and the machine name where the process is
1777 running if it's not local.
1777 running if it's not local.
1778
1778
1779 Locks protect the integrity of Mercurial's data, so they should be
1779 Locks protect the integrity of Mercurial's data, so they should be
1780 treated with care. System crashes or other interruptions may cause
1780 treated with care. System crashes or other interruptions may cause
1781 locks to not be properly released, though Mercurial will usually
1781 locks to not be properly released, though Mercurial will usually
1782 detect and remove such stale locks automatically.
1782 detect and remove such stale locks automatically.
1783
1783
1784 However, detecting stale locks may not always be possible (for
1784 However, detecting stale locks may not always be possible (for
1785 instance, on a shared filesystem). Removing locks may also be
1785 instance, on a shared filesystem). Removing locks may also be
1786 blocked by filesystem permissions.
1786 blocked by filesystem permissions.
1787
1787
1788 Setting a lock will prevent other commands from changing the data.
1788 Setting a lock will prevent other commands from changing the data.
1789 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1789 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1790 The locks that were set are removed when the command exits.
1790 The locks that were set are removed when the command exits.
1791
1791
1792 Returns 0 if no locks are held.
1792 Returns 0 if no locks are held.
1793
1793
1794 """
1794 """
1795
1795
1796 if opts.get('force_lock'):
1796 if opts.get('force_lock'):
1797 repo.svfs.unlink(b'lock')
1797 repo.svfs.unlink(b'lock')
1798 if opts.get('force_wlock'):
1798 if opts.get('force_wlock'):
1799 repo.vfs.unlink(b'wlock')
1799 repo.vfs.unlink(b'wlock')
1800 if opts.get('force_lock') or opts.get('force_wlock'):
1800 if opts.get('force_lock') or opts.get('force_wlock'):
1801 return 0
1801 return 0
1802
1802
1803 locks = []
1803 locks = []
1804 try:
1804 try:
1805 if opts.get('set_wlock'):
1805 if opts.get('set_wlock'):
1806 try:
1806 try:
1807 locks.append(repo.wlock(False))
1807 locks.append(repo.wlock(False))
1808 except error.LockHeld:
1808 except error.LockHeld:
1809 raise error.Abort(_(b'wlock is already held'))
1809 raise error.Abort(_(b'wlock is already held'))
1810 if opts.get('set_lock'):
1810 if opts.get('set_lock'):
1811 try:
1811 try:
1812 locks.append(repo.lock(False))
1812 locks.append(repo.lock(False))
1813 except error.LockHeld:
1813 except error.LockHeld:
1814 raise error.Abort(_(b'lock is already held'))
1814 raise error.Abort(_(b'lock is already held'))
1815 if len(locks):
1815 if len(locks):
1816 ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
1816 ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
1817 return 0
1817 return 0
1818 finally:
1818 finally:
1819 release(*locks)
1819 release(*locks)
1820
1820
1821 now = time.time()
1821 now = time.time()
1822 held = 0
1822 held = 0
1823
1823
1824 def report(vfs, name, method):
1824 def report(vfs, name, method):
1825 # this causes stale locks to get reaped for more accurate reporting
1825 # this causes stale locks to get reaped for more accurate reporting
1826 try:
1826 try:
1827 l = method(False)
1827 l = method(False)
1828 except error.LockHeld:
1828 except error.LockHeld:
1829 l = None
1829 l = None
1830
1830
1831 if l:
1831 if l:
1832 l.release()
1832 l.release()
1833 else:
1833 else:
1834 try:
1834 try:
1835 st = vfs.lstat(name)
1835 st = vfs.lstat(name)
1836 age = now - st[stat.ST_MTIME]
1836 age = now - st[stat.ST_MTIME]
1837 user = util.username(st.st_uid)
1837 user = util.username(st.st_uid)
1838 locker = vfs.readlock(name)
1838 locker = vfs.readlock(name)
1839 if b":" in locker:
1839 if b":" in locker:
1840 host, pid = locker.split(b':')
1840 host, pid = locker.split(b':')
1841 if host == socket.gethostname():
1841 if host == socket.gethostname():
1842 locker = b'user %s, process %s' % (user or b'None', pid)
1842 locker = b'user %s, process %s' % (user or b'None', pid)
1843 else:
1843 else:
1844 locker = b'user %s, process %s, host %s' % (
1844 locker = b'user %s, process %s, host %s' % (
1845 user or b'None',
1845 user or b'None',
1846 pid,
1846 pid,
1847 host,
1847 host,
1848 )
1848 )
1849 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
1849 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
1850 return 1
1850 return 1
1851 except OSError as e:
1851 except OSError as e:
1852 if e.errno != errno.ENOENT:
1852 if e.errno != errno.ENOENT:
1853 raise
1853 raise
1854
1854
1855 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
1855 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
1856 return 0
1856 return 0
1857
1857
1858 held += report(repo.svfs, b"lock", repo.lock)
1858 held += report(repo.svfs, b"lock", repo.lock)
1859 held += report(repo.vfs, b"wlock", repo.wlock)
1859 held += report(repo.vfs, b"wlock", repo.wlock)
1860
1860
1861 return held
1861 return held
1862
1862
1863
1863
1864 @command(
1864 @command(
1865 b'debugmanifestfulltextcache',
1865 b'debugmanifestfulltextcache',
1866 [
1866 [
1867 (b'', b'clear', False, _(b'clear the cache')),
1867 (b'', b'clear', False, _(b'clear the cache')),
1868 (
1868 (
1869 b'a',
1869 b'a',
1870 b'add',
1870 b'add',
1871 [],
1871 [],
1872 _(b'add the given manifest nodes to the cache'),
1872 _(b'add the given manifest nodes to the cache'),
1873 _(b'NODE'),
1873 _(b'NODE'),
1874 ),
1874 ),
1875 ],
1875 ],
1876 b'',
1876 b'',
1877 )
1877 )
1878 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
1878 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
1879 """show, clear or amend the contents of the manifest fulltext cache"""
1879 """show, clear or amend the contents of the manifest fulltext cache"""
1880
1880
1881 def getcache():
1881 def getcache():
1882 r = repo.manifestlog.getstorage(b'')
1882 r = repo.manifestlog.getstorage(b'')
1883 try:
1883 try:
1884 return r._fulltextcache
1884 return r._fulltextcache
1885 except AttributeError:
1885 except AttributeError:
1886 msg = _(
1886 msg = _(
1887 b"Current revlog implementation doesn't appear to have a "
1887 b"Current revlog implementation doesn't appear to have a "
1888 b"manifest fulltext cache\n"
1888 b"manifest fulltext cache\n"
1889 )
1889 )
1890 raise error.Abort(msg)
1890 raise error.Abort(msg)
1891
1891
1892 if opts.get('clear'):
1892 if opts.get('clear'):
1893 with repo.wlock():
1893 with repo.wlock():
1894 cache = getcache()
1894 cache = getcache()
1895 cache.clear(clear_persisted_data=True)
1895 cache.clear(clear_persisted_data=True)
1896 return
1896 return
1897
1897
1898 if add:
1898 if add:
1899 with repo.wlock():
1899 with repo.wlock():
1900 m = repo.manifestlog
1900 m = repo.manifestlog
1901 store = m.getstorage(b'')
1901 store = m.getstorage(b'')
1902 for n in add:
1902 for n in add:
1903 try:
1903 try:
1904 manifest = m[store.lookup(n)]
1904 manifest = m[store.lookup(n)]
1905 except error.LookupError as e:
1905 except error.LookupError as e:
1906 raise error.Abort(e, hint=b"Check your manifest node id")
1906 raise error.Abort(e, hint=b"Check your manifest node id")
1907 manifest.read() # stores revision in cache too
1907 manifest.read() # stores revision in cache too
1908 return
1908 return
1909
1909
1910 cache = getcache()
1910 cache = getcache()
1911 if not len(cache):
1911 if not len(cache):
1912 ui.write(_(b'cache empty\n'))
1912 ui.write(_(b'cache empty\n'))
1913 else:
1913 else:
1914 ui.write(
1914 ui.write(
1915 _(
1915 _(
1916 b'cache contains %d manifest entries, in order of most to '
1916 b'cache contains %d manifest entries, in order of most to '
1917 b'least recent:\n'
1917 b'least recent:\n'
1918 )
1918 )
1919 % (len(cache),)
1919 % (len(cache),)
1920 )
1920 )
1921 totalsize = 0
1921 totalsize = 0
1922 for nodeid in cache:
1922 for nodeid in cache:
1923 # Use cache.peek to not update the LRU order
1923 # Use cache.peek to not update the LRU order
1924 data = cache.peek(nodeid)
1924 data = cache.peek(nodeid)
1925 size = len(data)
1925 size = len(data)
1926 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1926 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1927 ui.write(
1927 ui.write(
1928 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
1928 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
1929 )
1929 )
1930 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
1930 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
1931 ui.write(
1931 ui.write(
1932 _(b'total cache data size %s, on-disk %s\n')
1932 _(b'total cache data size %s, on-disk %s\n')
1933 % (util.bytecount(totalsize), util.bytecount(ondisk))
1933 % (util.bytecount(totalsize), util.bytecount(ondisk))
1934 )
1934 )
1935
1935
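# Illustrative sketch (not part of the original module): the size accounting
# above charges each cache entry 24 bytes of overhead (a 20-byte nodeid plus a
# 4-byte length field) on top of the manifest fulltext itself.
def _example_cache_entry_size(data):
    """Return the accounted in-memory size of one cache entry (illustration only)."""
    return len(data) + 20 + 4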
1936
1936
1937 @command(b'debugmergestate', [], b'')
1937 @command(b'debugmergestate', [], b'')
1938 def debugmergestate(ui, repo, *args):
1938 def debugmergestate(ui, repo, *args):
1939 """print merge state
1939 """print merge state
1940
1940
1941 Use --verbose to print out information about whether v1 or v2 merge state
1941 Use --verbose to print out information about whether v1 or v2 merge state
1942 was chosen."""
1942 was chosen."""
1943
1943
1944 def _hashornull(h):
1944 def _hashornull(h):
1945 if h == nullhex:
1945 if h == nullhex:
1946 return b'null'
1946 return b'null'
1947 else:
1947 else:
1948 return h
1948 return h
1949
1949
1950 def printrecords(version):
1950 def printrecords(version):
1951 ui.writenoi18n(b'* version %d records\n' % version)
1951 ui.writenoi18n(b'* version %d records\n' % version)
1952 if version == 1:
1952 if version == 1:
1953 records = v1records
1953 records = v1records
1954 else:
1954 else:
1955 records = v2records
1955 records = v2records
1956
1956
1957 for rtype, record in records:
1957 for rtype, record in records:
1958 # pretty print some record types
1958 # pretty print some record types
1959 if rtype == b'L':
1959 if rtype == b'L':
1960 ui.writenoi18n(b'local: %s\n' % record)
1960 ui.writenoi18n(b'local: %s\n' % record)
1961 elif rtype == b'O':
1961 elif rtype == b'O':
1962 ui.writenoi18n(b'other: %s\n' % record)
1962 ui.writenoi18n(b'other: %s\n' % record)
1963 elif rtype == b'm':
1963 elif rtype == b'm':
1964 driver, mdstate = record.split(b'\0', 1)
1964 driver, mdstate = record.split(b'\0', 1)
1965 ui.writenoi18n(
1965 ui.writenoi18n(
1966 b'merge driver: %s (state "%s")\n' % (driver, mdstate)
1966 b'merge driver: %s (state "%s")\n' % (driver, mdstate)
1967 )
1967 )
1968 elif rtype in b'FDC':
1968 elif rtype in b'FDC':
1969 r = record.split(b'\0')
1969 r = record.split(b'\0')
1970 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1970 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1971 if version == 1:
1971 if version == 1:
1972 onode = b'not stored in v1 format'
1972 onode = b'not stored in v1 format'
1973 flags = r[7]
1973 flags = r[7]
1974 else:
1974 else:
1975 onode, flags = r[7:9]
1975 onode, flags = r[7:9]
1976 ui.writenoi18n(
1976 ui.writenoi18n(
1977 b'file: %s (record type "%s", state "%s", hash %s)\n'
1977 b'file: %s (record type "%s", state "%s", hash %s)\n'
1978 % (f, rtype, state, _hashornull(hash))
1978 % (f, rtype, state, _hashornull(hash))
1979 )
1979 )
1980 ui.writenoi18n(
1980 ui.writenoi18n(
1981 b' local path: %s (flags "%s")\n' % (lfile, flags)
1981 b' local path: %s (flags "%s")\n' % (lfile, flags)
1982 )
1982 )
1983 ui.writenoi18n(
1983 ui.writenoi18n(
1984 b' ancestor path: %s (node %s)\n'
1984 b' ancestor path: %s (node %s)\n'
1985 % (afile, _hashornull(anode))
1985 % (afile, _hashornull(anode))
1986 )
1986 )
1987 ui.writenoi18n(
1987 ui.writenoi18n(
1988 b' other path: %s (node %s)\n'
1988 b' other path: %s (node %s)\n'
1989 % (ofile, _hashornull(onode))
1989 % (ofile, _hashornull(onode))
1990 )
1990 )
1991 elif rtype == b'f':
1991 elif rtype == b'f':
1992 filename, rawextras = record.split(b'\0', 1)
1992 filename, rawextras = record.split(b'\0', 1)
1993 extras = rawextras.split(b'\0')
1993 extras = rawextras.split(b'\0')
1994 i = 0
1994 i = 0
1995 extrastrings = []
1995 extrastrings = []
1996 while i < len(extras):
1996 while i < len(extras):
1997 extrastrings.append(b'%s = %s' % (extras[i], extras[i + 1]))
1997 extrastrings.append(b'%s = %s' % (extras[i], extras[i + 1]))
1998 i += 2
1998 i += 2
1999
1999
2000 ui.writenoi18n(
2000 ui.writenoi18n(
2001 b'file extras: %s (%s)\n'
2001 b'file extras: %s (%s)\n'
2002 % (filename, b', '.join(extrastrings))
2002 % (filename, b', '.join(extrastrings))
2003 )
2003 )
2004 elif rtype == b'l':
2004 elif rtype == b'l':
2005 labels = record.split(b'\0', 2)
2005 labels = record.split(b'\0', 2)
2006 labels = [l for l in labels if len(l) > 0]
2006 labels = [l for l in labels if len(l) > 0]
2007 ui.writenoi18n(b'labels:\n')
2007 ui.writenoi18n(b'labels:\n')
2008 ui.write((b' local: %s\n' % labels[0]))
2008 ui.write((b' local: %s\n' % labels[0]))
2009 ui.write((b' other: %s\n' % labels[1]))
2009 ui.write((b' other: %s\n' % labels[1]))
2010 if len(labels) > 2:
2010 if len(labels) > 2:
2011 ui.write((b' base: %s\n' % labels[2]))
2011 ui.write((b' base: %s\n' % labels[2]))
2012 else:
2012 else:
2013 ui.writenoi18n(
2013 ui.writenoi18n(
2014 b'unrecognized entry: %s\t%s\n'
2014 b'unrecognized entry: %s\t%s\n'
2015 % (rtype, record.replace(b'\0', b'\t'))
2015 % (rtype, record.replace(b'\0', b'\t'))
2016 )
2016 )
2017
2017
2018 # Avoid mergestate.read() since it may raise an exception for unsupported
2018 # Avoid mergestate.read() since it may raise an exception for unsupported
2019 # merge state records. We shouldn't be doing this, but this is OK since this
2019 # merge state records. We shouldn't be doing this, but this is OK since this
2020 # command is pretty low-level.
2020 # command is pretty low-level.
2021 ms = mergemod.mergestate(repo)
2021 ms = mergemod.mergestate(repo)
2022
2022
2023 # sort so that reasonable information is on top
2023 # sort so that reasonable information is on top
2024 v1records = ms._readrecordsv1()
2024 v1records = ms._readrecordsv1()
2025 v2records = ms._readrecordsv2()
2025 v2records = ms._readrecordsv2()
2026 order = b'LOml'
2026 order = b'LOml'
2027
2027
2028 def key(r):
2028 def key(r):
2029 idx = order.find(r[0])
2029 idx = order.find(r[0])
2030 if idx == -1:
2030 if idx == -1:
2031 return (1, r[1])
2031 return (1, r[1])
2032 else:
2032 else:
2033 return (0, idx)
2033 return (0, idx)
2034
2034
2035 v1records.sort(key=key)
2035 v1records.sort(key=key)
2036 v2records.sort(key=key)
2036 v2records.sort(key=key)
2037
2037
2038 if not v1records and not v2records:
2038 if not v1records and not v2records:
2039 ui.writenoi18n(b'no merge state found\n')
2039 ui.writenoi18n(b'no merge state found\n')
2040 elif not v2records:
2040 elif not v2records:
2041 ui.notenoi18n(b'no version 2 merge state\n')
2041 ui.notenoi18n(b'no version 2 merge state\n')
2042 printrecords(1)
2042 printrecords(1)
2043 elif ms._v1v2match(v1records, v2records):
2043 elif ms._v1v2match(v1records, v2records):
2044 ui.notenoi18n(b'v1 and v2 states match: using v2\n')
2044 ui.notenoi18n(b'v1 and v2 states match: using v2\n')
2045 printrecords(2)
2045 printrecords(2)
2046 else:
2046 else:
2047 ui.notenoi18n(b'v1 and v2 states mismatch: using v1\n')
2047 ui.notenoi18n(b'v1 and v2 states mismatch: using v1\n')
2048 printrecords(1)
2048 printrecords(1)
2049 if ui.verbose:
2049 if ui.verbose:
2050 printrecords(2)
2050 printrecords(2)
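# Illustrative usage sketch; the invocations below are examples, not taken from
# this file. Run in a repository with an unfinished merge to dump the records
# printed by printrecords() above ("L"/"O" identify the local and other nodes,
# "F"/"D"/"C" records describe individual files):
#
#   $ hg debugmergestate
#   $ hg debugmergestate --verbose    # also notes whether v1 or v2 state was used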
2051
2051
2052
2052
2053 @command(b'debugnamecomplete', [], _(b'NAME...'))
2053 @command(b'debugnamecomplete', [], _(b'NAME...'))
2054 def debugnamecomplete(ui, repo, *args):
2054 def debugnamecomplete(ui, repo, *args):
2055 '''complete "names" - tags, open branch names, bookmark names'''
2055 '''complete "names" - tags, open branch names, bookmark names'''
2056
2056
2057 names = set()
2057 names = set()
2058 # since we previously only listed open branches, we will handle that
2058 # since we previously only listed open branches, we will handle that
2059 # specially (after this for loop)
2059 # specially (after this for loop)
2060 for name, ns in pycompat.iteritems(repo.names):
2060 for name, ns in pycompat.iteritems(repo.names):
2061 if name != b'branches':
2061 if name != b'branches':
2062 names.update(ns.listnames(repo))
2062 names.update(ns.listnames(repo))
2063 names.update(
2063 names.update(
2064 tag
2064 tag
2065 for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
2065 for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
2066 if not closed
2066 if not closed
2067 )
2067 )
2068 completions = set()
2068 completions = set()
2069 if not args:
2069 if not args:
2070 args = [b'']
2070 args = [b'']
2071 for a in args:
2071 for a in args:
2072 completions.update(n for n in names if n.startswith(a))
2072 completions.update(n for n in names if n.startswith(a))
2073 ui.write(b'\n'.join(sorted(completions)))
2073 ui.write(b'\n'.join(sorted(completions)))
2074 ui.write(b'\n')
2074 ui.write(b'\n')
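# Illustrative usage sketch (the prefix below is hypothetical). With no
# argument every known name is listed; with a prefix only names starting with
# it are printed, one per line:
#
#   $ hg debugnamecomplete
#   $ hg debugnamecomplete def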
2075
2075
2076
2076
2077 @command(
2077 @command(
2078 b'debugobsolete',
2078 b'debugobsolete',
2079 [
2079 [
2080 (b'', b'flags', 0, _(b'markers flag')),
2080 (b'', b'flags', 0, _(b'markers flag')),
2081 (
2081 (
2082 b'',
2082 b'',
2083 b'record-parents',
2083 b'record-parents',
2084 False,
2084 False,
2085 _(b'record parent information for the precursor'),
2085 _(b'record parent information for the precursor'),
2086 ),
2086 ),
2087 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2087 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2088 (
2088 (
2089 b'',
2089 b'',
2090 b'exclusive',
2090 b'exclusive',
2091 False,
2091 False,
2092 _(b'restrict display to markers only relevant to REV'),
2092 _(b'restrict display to markers only relevant to REV'),
2093 ),
2093 ),
2094 (b'', b'index', False, _(b'display index of the marker')),
2094 (b'', b'index', False, _(b'display index of the marker')),
2095 (b'', b'delete', [], _(b'delete markers specified by indices')),
2095 (b'', b'delete', [], _(b'delete markers specified by indices')),
2096 ]
2096 ]
2097 + cmdutil.commitopts2
2097 + cmdutil.commitopts2
2098 + cmdutil.formatteropts,
2098 + cmdutil.formatteropts,
2099 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2099 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2100 )
2100 )
2101 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2101 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2102 """create arbitrary obsolete marker
2102 """create arbitrary obsolete marker
2103
2103
2104 With no arguments, displays the list of obsolescence markers."""
2104 With no arguments, displays the list of obsolescence markers."""
2105
2105
2106 opts = pycompat.byteskwargs(opts)
2106 opts = pycompat.byteskwargs(opts)
2107
2107
2108 def parsenodeid(s):
2108 def parsenodeid(s):
2109 try:
2109 try:
2110 # We do not use revsingle/revrange functions here to accept
2110 # We do not use revsingle/revrange functions here to accept
2111 # arbitrary node identifiers, possibly not present in the
2111 # arbitrary node identifiers, possibly not present in the
2112 # local repository.
2112 # local repository.
2113 n = bin(s)
2113 n = bin(s)
2114 if len(n) != len(nullid):
2114 if len(n) != len(nullid):
2115 raise TypeError()
2115 raise TypeError()
2116 return n
2116 return n
2117 except TypeError:
2117 except TypeError:
2118 raise error.Abort(
2118 raise error.Abort(
2119 b'changeset references must be full hexadecimal '
2119 b'changeset references must be full hexadecimal '
2120 b'node identifiers'
2120 b'node identifiers'
2121 )
2121 )
2122
2122
2123 if opts.get(b'delete'):
2123 if opts.get(b'delete'):
2124 indices = []
2124 indices = []
2125 for v in opts.get(b'delete'):
2125 for v in opts.get(b'delete'):
2126 try:
2126 try:
2127 indices.append(int(v))
2127 indices.append(int(v))
2128 except ValueError:
2128 except ValueError:
2129 raise error.Abort(
2129 raise error.Abort(
2130 _(b'invalid index value: %r') % v,
2130 _(b'invalid index value: %r') % v,
2131 hint=_(b'use integers for indices'),
2131 hint=_(b'use integers for indices'),
2132 )
2132 )
2133
2133
2134 if repo.currenttransaction():
2134 if repo.currenttransaction():
2135 raise error.Abort(
2135 raise error.Abort(
2136 _(b'cannot delete obsmarkers in the middle of a transaction.')
2137 )
2137 )
2138
2138
2139 with repo.lock():
2139 with repo.lock():
2140 n = repair.deleteobsmarkers(repo.obsstore, indices)
2140 n = repair.deleteobsmarkers(repo.obsstore, indices)
2141 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2141 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2142
2142
2143 return
2143 return
2144
2144
2145 if precursor is not None:
2145 if precursor is not None:
2146 if opts[b'rev']:
2146 if opts[b'rev']:
2147 raise error.Abort(b'cannot select revision when creating marker')
2147 raise error.Abort(b'cannot select revision when creating marker')
2148 metadata = {}
2148 metadata = {}
2149 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2149 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2150 succs = tuple(parsenodeid(succ) for succ in successors)
2150 succs = tuple(parsenodeid(succ) for succ in successors)
2151 l = repo.lock()
2151 l = repo.lock()
2152 try:
2152 try:
2153 tr = repo.transaction(b'debugobsolete')
2153 tr = repo.transaction(b'debugobsolete')
2154 try:
2154 try:
2155 date = opts.get(b'date')
2155 date = opts.get(b'date')
2156 if date:
2156 if date:
2157 date = dateutil.parsedate(date)
2157 date = dateutil.parsedate(date)
2158 else:
2158 else:
2159 date = None
2159 date = None
2160 prec = parsenodeid(precursor)
2160 prec = parsenodeid(precursor)
2161 parents = None
2161 parents = None
2162 if opts[b'record_parents']:
2162 if opts[b'record_parents']:
2163 if prec not in repo.unfiltered():
2163 if prec not in repo.unfiltered():
2164 raise error.Abort(
2164 raise error.Abort(
2165 b'cannot use --record-parents on '
2166 b'unknown changesets'
2167 )
2167 )
2168 parents = repo.unfiltered()[prec].parents()
2168 parents = repo.unfiltered()[prec].parents()
2169 parents = tuple(p.node() for p in parents)
2169 parents = tuple(p.node() for p in parents)
2170 repo.obsstore.create(
2170 repo.obsstore.create(
2171 tr,
2171 tr,
2172 prec,
2172 prec,
2173 succs,
2173 succs,
2174 opts[b'flags'],
2174 opts[b'flags'],
2175 parents=parents,
2175 parents=parents,
2176 date=date,
2176 date=date,
2177 metadata=metadata,
2177 metadata=metadata,
2178 ui=ui,
2178 ui=ui,
2179 )
2179 )
2180 tr.close()
2180 tr.close()
2181 except ValueError as exc:
2181 except ValueError as exc:
2182 raise error.Abort(
2182 raise error.Abort(
2183 _(b'bad obsmarker input: %s') % pycompat.bytestr(exc)
2183 _(b'bad obsmarker input: %s') % pycompat.bytestr(exc)
2184 )
2184 )
2185 finally:
2185 finally:
2186 tr.release()
2186 tr.release()
2187 finally:
2187 finally:
2188 l.release()
2188 l.release()
2189 else:
2189 else:
2190 if opts[b'rev']:
2190 if opts[b'rev']:
2191 revs = scmutil.revrange(repo, opts[b'rev'])
2191 revs = scmutil.revrange(repo, opts[b'rev'])
2192 nodes = [repo[r].node() for r in revs]
2192 nodes = [repo[r].node() for r in revs]
2193 markers = list(
2193 markers = list(
2194 obsutil.getmarkers(
2194 obsutil.getmarkers(
2195 repo, nodes=nodes, exclusive=opts[b'exclusive']
2195 repo, nodes=nodes, exclusive=opts[b'exclusive']
2196 )
2196 )
2197 )
2197 )
2198 markers.sort(key=lambda x: x._data)
2198 markers.sort(key=lambda x: x._data)
2199 else:
2199 else:
2200 markers = obsutil.getmarkers(repo)
2200 markers = obsutil.getmarkers(repo)
2201
2201
2202 markerstoiter = markers
2202 markerstoiter = markers
2203 isrelevant = lambda m: True
2203 isrelevant = lambda m: True
2204 if opts.get(b'rev') and opts.get(b'index'):
2204 if opts.get(b'rev') and opts.get(b'index'):
2205 markerstoiter = obsutil.getmarkers(repo)
2205 markerstoiter = obsutil.getmarkers(repo)
2206 markerset = set(markers)
2206 markerset = set(markers)
2207 isrelevant = lambda m: m in markerset
2207 isrelevant = lambda m: m in markerset
2208
2208
2209 fm = ui.formatter(b'debugobsolete', opts)
2209 fm = ui.formatter(b'debugobsolete', opts)
2210 for i, m in enumerate(markerstoiter):
2210 for i, m in enumerate(markerstoiter):
2211 if not isrelevant(m):
2211 if not isrelevant(m):
2212 # a marker can be irrelevant when we're iterating over a set
2213 # of markers (markerstoiter) which is bigger than the set
2214 # of markers we want to display (markers).
2215 # This can happen if both --index and --rev options are
2216 # provided: we then need to iterate over all of the markers
2217 # to get the correct indices, but only display the ones that
2218 # are relevant to the --rev value
2219 continue
2219 continue
2220 fm.startitem()
2220 fm.startitem()
2221 ind = i if opts.get(b'index') else None
2221 ind = i if opts.get(b'index') else None
2222 cmdutil.showmarker(fm, m, index=ind)
2222 cmdutil.showmarker(fm, m, index=ind)
2223 fm.end()
2223 fm.end()
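# Illustrative usage sketch (node ids below are placeholders). Listing and
# creating markers; parsenodeid() above requires full 40-digit hexadecimal
# node identifiers when creating one:
#
#   $ hg debugobsolete                       # list all markers
#   $ hg debugobsolete --rev . --index       # markers relevant to the working parent
#   $ hg debugobsolete OLDNODE NEWNODE       # record that OLDNODE was rewritten as NEWNODE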
2224
2224
2225
2225
2226 @command(
2226 @command(
2227 b'debugp1copies',
2227 b'debugp1copies',
2228 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2228 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2229 _(b'[-r REV]'),
2229 _(b'[-r REV]'),
2230 )
2230 )
2231 def debugp1copies(ui, repo, **opts):
2231 def debugp1copies(ui, repo, **opts):
2232 """dump copy information compared to p1"""
2232 """dump copy information compared to p1"""
2233
2233
2234 opts = pycompat.byteskwargs(opts)
2234 opts = pycompat.byteskwargs(opts)
2235 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2235 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2236 for dst, src in ctx.p1copies().items():
2236 for dst, src in ctx.p1copies().items():
2237 ui.write(b'%s -> %s\n' % (src, dst))
2237 ui.write(b'%s -> %s\n' % (src, dst))
2238
2238
2239
2239
2240 @command(
2240 @command(
2241 b'debugp2copies',
2241 b'debugp2copies',
2242 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2242 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2243 _(b'[-r REV]'),
2243 _(b'[-r REV]'),
2244 )
2244 )
2245 def debugp2copies(ui, repo, **opts):
2246 """dump copy information compared to p2"""
2246 """dump copy information compared to p2"""
2247
2247
2248 opts = pycompat.byteskwargs(opts)
2248 opts = pycompat.byteskwargs(opts)
2249 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2249 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2250 for dst, src in ctx.p2copies().items():
2250 for dst, src in ctx.p2copies().items():
2251 ui.write(b'%s -> %s\n' % (src, dst))
2251 ui.write(b'%s -> %s\n' % (src, dst))
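# Illustrative usage sketch (REV is a placeholder). Both commands print one
# "source -> destination" line per copy recorded against the chosen parent:
#
#   $ hg debugp1copies -r REV
#   $ hg debugp2copies -r REV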
2252
2252
2253
2253
2254 @command(
2254 @command(
2255 b'debugpathcomplete',
2255 b'debugpathcomplete',
2256 [
2256 [
2257 (b'f', b'full', None, _(b'complete an entire path')),
2257 (b'f', b'full', None, _(b'complete an entire path')),
2258 (b'n', b'normal', None, _(b'show only normal files')),
2258 (b'n', b'normal', None, _(b'show only normal files')),
2259 (b'a', b'added', None, _(b'show only added files')),
2259 (b'a', b'added', None, _(b'show only added files')),
2260 (b'r', b'removed', None, _(b'show only removed files')),
2260 (b'r', b'removed', None, _(b'show only removed files')),
2261 ],
2261 ],
2262 _(b'FILESPEC...'),
2262 _(b'FILESPEC...'),
2263 )
2263 )
2264 def debugpathcomplete(ui, repo, *specs, **opts):
2264 def debugpathcomplete(ui, repo, *specs, **opts):
2265 '''complete part or all of a tracked path
2265 '''complete part or all of a tracked path
2266
2266
2267 This command supports shells that offer path name completion. It
2267 This command supports shells that offer path name completion. It
2268 currently completes only files already known to the dirstate.
2268 currently completes only files already known to the dirstate.
2269
2269
2270 Completion extends only to the next path segment unless
2270 Completion extends only to the next path segment unless
2271 --full is specified, in which case entire paths are used.'''
2271 --full is specified, in which case entire paths are used.'''
2272
2272
2273 def complete(path, acceptable):
2273 def complete(path, acceptable):
2274 dirstate = repo.dirstate
2274 dirstate = repo.dirstate
2275 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2275 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2276 rootdir = repo.root + pycompat.ossep
2276 rootdir = repo.root + pycompat.ossep
2277 if spec != repo.root and not spec.startswith(rootdir):
2277 if spec != repo.root and not spec.startswith(rootdir):
2278 return [], []
2278 return [], []
2279 if os.path.isdir(spec):
2279 if os.path.isdir(spec):
2280 spec += b'/'
2280 spec += b'/'
2281 spec = spec[len(rootdir) :]
2281 spec = spec[len(rootdir) :]
2282 fixpaths = pycompat.ossep != b'/'
2282 fixpaths = pycompat.ossep != b'/'
2283 if fixpaths:
2283 if fixpaths:
2284 spec = spec.replace(pycompat.ossep, b'/')
2284 spec = spec.replace(pycompat.ossep, b'/')
2285 speclen = len(spec)
2285 speclen = len(spec)
2286 fullpaths = opts['full']
2286 fullpaths = opts['full']
2287 files, dirs = set(), set()
2287 files, dirs = set(), set()
2288 adddir, addfile = dirs.add, files.add
2288 adddir, addfile = dirs.add, files.add
2289 for f, st in pycompat.iteritems(dirstate):
2289 for f, st in pycompat.iteritems(dirstate):
2290 if f.startswith(spec) and st[0] in acceptable:
2290 if f.startswith(spec) and st[0] in acceptable:
2291 if fixpaths:
2291 if fixpaths:
2292 f = f.replace(b'/', pycompat.ossep)
2292 f = f.replace(b'/', pycompat.ossep)
2293 if fullpaths:
2293 if fullpaths:
2294 addfile(f)
2294 addfile(f)
2295 continue
2295 continue
2296 s = f.find(pycompat.ossep, speclen)
2296 s = f.find(pycompat.ossep, speclen)
2297 if s >= 0:
2297 if s >= 0:
2298 adddir(f[:s])
2298 adddir(f[:s])
2299 else:
2299 else:
2300 addfile(f)
2300 addfile(f)
2301 return files, dirs
2301 return files, dirs
2302
2302
2303 acceptable = b''
2303 acceptable = b''
2304 if opts['normal']:
2304 if opts['normal']:
2305 acceptable += b'nm'
2305 acceptable += b'nm'
2306 if opts['added']:
2306 if opts['added']:
2307 acceptable += b'a'
2307 acceptable += b'a'
2308 if opts['removed']:
2308 if opts['removed']:
2309 acceptable += b'r'
2309 acceptable += b'r'
2310 cwd = repo.getcwd()
2310 cwd = repo.getcwd()
2311 if not specs:
2311 if not specs:
2312 specs = [b'.']
2312 specs = [b'.']
2313
2313
2314 files, dirs = set(), set()
2314 files, dirs = set(), set()
2315 for spec in specs:
2315 for spec in specs:
2316 f, d = complete(spec, acceptable or b'nmar')
2316 f, d = complete(spec, acceptable or b'nmar')
2317 files.update(f)
2317 files.update(f)
2318 dirs.update(d)
2318 dirs.update(d)
2319 files.update(dirs)
2319 files.update(dirs)
2320 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2320 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2321 ui.write(b'\n')
2321 ui.write(b'\n')
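# Illustrative usage sketch (the path prefix is hypothetical). Completion stops
# at the next path segment unless --full is given, and -n/-a/-r restrict which
# dirstate states are considered:
#
#   $ hg debugpathcomplete src/
#   $ hg debugpathcomplete --full --added src/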
2322
2322
2323
2323
2324 @command(
2324 @command(
2325 b'debugpathcopies',
2325 b'debugpathcopies',
2326 cmdutil.walkopts,
2326 cmdutil.walkopts,
2327 b'hg debugpathcopies REV1 REV2 [FILE]',
2327 b'hg debugpathcopies REV1 REV2 [FILE]',
2328 inferrepo=True,
2328 inferrepo=True,
2329 )
2329 )
2330 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
2330 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
2331 """show copies between two revisions"""
2331 """show copies between two revisions"""
2332 ctx1 = scmutil.revsingle(repo, rev1)
2332 ctx1 = scmutil.revsingle(repo, rev1)
2333 ctx2 = scmutil.revsingle(repo, rev2)
2333 ctx2 = scmutil.revsingle(repo, rev2)
2334 m = scmutil.match(ctx1, pats, opts)
2334 m = scmutil.match(ctx1, pats, opts)
2335 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
2335 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
2336 ui.write(b'%s -> %s\n' % (src, dst))
2336 ui.write(b'%s -> %s\n' % (src, dst))
2337
2337
2338
2338
2339 @command(b'debugpeer', [], _(b'PATH'), norepo=True)
2339 @command(b'debugpeer', [], _(b'PATH'), norepo=True)
2340 def debugpeer(ui, path):
2340 def debugpeer(ui, path):
2341 """establish a connection to a peer repository"""
2341 """establish a connection to a peer repository"""
2342 # Always enable peer request logging. Requires --debug to display
2342 # Always enable peer request logging. Requires --debug to display
2343 # though.
2343 # though.
2344 overrides = {
2344 overrides = {
2345 (b'devel', b'debug.peer-request'): True,
2345 (b'devel', b'debug.peer-request'): True,
2346 }
2346 }
2347
2347
2348 with ui.configoverride(overrides):
2348 with ui.configoverride(overrides):
2349 peer = hg.peer(ui, {}, path)
2349 peer = hg.peer(ui, {}, path)
2350
2350
2351 local = peer.local() is not None
2351 local = peer.local() is not None
2352 canpush = peer.canpush()
2352 canpush = peer.canpush()
2353
2353
2354 ui.write(_(b'url: %s\n') % peer.url())
2354 ui.write(_(b'url: %s\n') % peer.url())
2355 ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
2355 ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
2356 ui.write(_(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no')))
2356 ui.write(_(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no')))
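# Illustrative usage sketch (the URL is a placeholder). The command connects to
# the peer and reports the three fields written above:
#
#   $ hg debugpeer ssh://example.com/repo
#   url: ...
#   local: yes (or no)
#   pushable: yes (or no)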
2357
2357
2358
2358
2359 @command(
2359 @command(
2360 b'debugpickmergetool',
2360 b'debugpickmergetool',
2361 [
2361 [
2362 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2362 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2363 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2363 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2364 ]
2364 ]
2365 + cmdutil.walkopts
2365 + cmdutil.walkopts
2366 + cmdutil.mergetoolopts,
2366 + cmdutil.mergetoolopts,
2367 _(b'[PATTERN]...'),
2367 _(b'[PATTERN]...'),
2368 inferrepo=True,
2368 inferrepo=True,
2369 )
2369 )
2370 def debugpickmergetool(ui, repo, *pats, **opts):
2370 def debugpickmergetool(ui, repo, *pats, **opts):
2371 """examine which merge tool is chosen for specified file
2371 """examine which merge tool is chosen for specified file
2372
2372
2373 As described in :hg:`help merge-tools`, Mercurial examines
2373 As described in :hg:`help merge-tools`, Mercurial examines
2374 configurations below in this order to decide which merge tool is
2374 configurations below in this order to decide which merge tool is
2375 chosen for specified file.
2375 chosen for specified file.
2376
2376
2377 1. ``--tool`` option
2377 1. ``--tool`` option
2378 2. ``HGMERGE`` environment variable
2378 2. ``HGMERGE`` environment variable
2379 3. configurations in ``merge-patterns`` section
2379 3. configurations in ``merge-patterns`` section
2380 4. configuration of ``ui.merge``
2380 4. configuration of ``ui.merge``
2381 5. configurations in ``merge-tools`` section
2381 5. configurations in ``merge-tools`` section
2382 6. ``hgmerge`` tool (for historical reason only)
2382 6. ``hgmerge`` tool (for historical reason only)
2383 7. default tool for fallback (``:merge`` or ``:prompt``)
2383 7. default tool for fallback (``:merge`` or ``:prompt``)
2384
2384
2385 This command writes out examination result in the style below::
2385 This command writes out examination result in the style below::
2386
2386
2387 FILE = MERGETOOL
2387 FILE = MERGETOOL
2388
2388
2389 By default, all files known in the first parent context of the
2389 By default, all files known in the first parent context of the
2390 working directory are examined. Use file patterns and/or -I/-X
2390 working directory are examined. Use file patterns and/or -I/-X
2391 options to limit target files. -r/--rev is also useful to examine
2391 options to limit target files. -r/--rev is also useful to examine
2392 files in another context without actual updating to it.
2392 files in another context without actual updating to it.
2393
2393
2394 With --debug, this command shows warning messages while matching
2394 With --debug, this command shows warning messages while matching
2395 against ``merge-patterns`` and so on, too. It is recommended to
2395 against ``merge-patterns`` and so on, too. It is recommended to
2396 use this option with explicit file patterns and/or -I/-X options,
2396 use this option with explicit file patterns and/or -I/-X options,
2397 because this option increases amount of output per file according
2397 because this option increases amount of output per file according
2398 to configurations in hgrc.
2398 to configurations in hgrc.
2399
2399
2400 With -v/--verbose, this command shows configurations below at
2400 With -v/--verbose, this command shows configurations below at
2401 first (only if specified).
2401 first (only if specified).
2402
2402
2403 - ``--tool`` option
2403 - ``--tool`` option
2404 - ``HGMERGE`` environment variable
2404 - ``HGMERGE`` environment variable
2405 - configuration of ``ui.merge``
2405 - configuration of ``ui.merge``
2406
2406
2407 If merge tool is chosen before matching against
2407 If merge tool is chosen before matching against
2408 ``merge-patterns``, this command can't show any helpful
2408 ``merge-patterns``, this command can't show any helpful
2409 information, even with --debug. In such case, information above is
2409 information, even with --debug. In such case, information above is
2410 useful to know why a merge tool is chosen.
2410 useful to know why a merge tool is chosen.
2411 """
2411 """
2412 opts = pycompat.byteskwargs(opts)
2412 opts = pycompat.byteskwargs(opts)
2413 overrides = {}
2413 overrides = {}
2414 if opts[b'tool']:
2414 if opts[b'tool']:
2415 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
2415 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
2416 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
2416 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
2417
2417
2418 with ui.configoverride(overrides, b'debugmergepatterns'):
2418 with ui.configoverride(overrides, b'debugmergepatterns'):
2419 hgmerge = encoding.environ.get(b"HGMERGE")
2419 hgmerge = encoding.environ.get(b"HGMERGE")
2420 if hgmerge is not None:
2420 if hgmerge is not None:
2421 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
2421 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
2422 uimerge = ui.config(b"ui", b"merge")
2422 uimerge = ui.config(b"ui", b"merge")
2423 if uimerge:
2423 if uimerge:
2424 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
2424 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
2425
2425
2426 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2426 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2427 m = scmutil.match(ctx, pats, opts)
2427 m = scmutil.match(ctx, pats, opts)
2428 changedelete = opts[b'changedelete']
2428 changedelete = opts[b'changedelete']
2429 for path in ctx.walk(m):
2429 for path in ctx.walk(m):
2430 fctx = ctx[path]
2430 fctx = ctx[path]
2431 try:
2431 try:
2432 if not ui.debugflag:
2432 if not ui.debugflag:
2433 ui.pushbuffer(error=True)
2433 ui.pushbuffer(error=True)
2434 tool, toolpath = filemerge._picktool(
2434 tool, toolpath = filemerge._picktool(
2435 repo,
2435 repo,
2436 ui,
2436 ui,
2437 path,
2437 path,
2438 fctx.isbinary(),
2438 fctx.isbinary(),
2439 b'l' in fctx.flags(),
2439 b'l' in fctx.flags(),
2440 changedelete,
2440 changedelete,
2441 )
2441 )
2442 finally:
2442 finally:
2443 if not ui.debugflag:
2443 if not ui.debugflag:
2444 ui.popbuffer()
2444 ui.popbuffer()
2445 ui.write(b'%s = %s\n' % (path, tool))
2445 ui.write(b'%s = %s\n' % (path, tool))
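# Illustrative usage sketch (the pattern and tool name are hypothetical). Each
# examined file is reported as "FILE = MERGETOOL":
#
#   $ hg debugpickmergetool
#   $ hg debugpickmergetool --tool :merge3 -r . 'glob:**.c'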
2446
2446
2447
2447
2448 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2448 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2449 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2449 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2450 '''access the pushkey key/value protocol
2450 '''access the pushkey key/value protocol
2451
2451
2452 With two args, list the keys in the given namespace.
2452 With two args, list the keys in the given namespace.
2453
2453
2454 With five args, set a key to new if it is currently set to old.
2455 Reports success or failure.
2455 Reports success or failure.
2456 '''
2456 '''
2457
2457
2458 target = hg.peer(ui, {}, repopath)
2458 target = hg.peer(ui, {}, repopath)
2459 if keyinfo:
2459 if keyinfo:
2460 key, old, new = keyinfo
2460 key, old, new = keyinfo
2461 with target.commandexecutor() as e:
2461 with target.commandexecutor() as e:
2462 r = e.callcommand(
2462 r = e.callcommand(
2463 b'pushkey',
2463 b'pushkey',
2464 {
2464 {
2465 b'namespace': namespace,
2465 b'namespace': namespace,
2466 b'key': key,
2466 b'key': key,
2467 b'old': old,
2467 b'old': old,
2468 b'new': new,
2468 b'new': new,
2469 },
2469 },
2470 ).result()
2470 ).result()
2471
2471
2472 ui.status(pycompat.bytestr(r) + b'\n')
2472 ui.status(pycompat.bytestr(r) + b'\n')
2473 return not r
2473 return not r
2474 else:
2474 else:
2475 for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))):
2475 for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))):
2476 ui.write(
2476 ui.write(
2477 b"%s\t%s\n" % (stringutil.escapestr(k), stringutil.escapestr(v))
2477 b"%s\t%s\n" % (stringutil.escapestr(k), stringutil.escapestr(v))
2478 )
2478 )
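# Illustrative usage sketch (REPO and the key values are placeholders). The
# two-argument form lists a namespace; the five-argument form attempts an
# update and reports success or failure:
#
#   $ hg debugpushkey REPO bookmarks
#   $ hg debugpushkey REPO bookmarks BOOKMARK OLDNODE NEWNODE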
2479
2479
2480
2480
2481 @command(b'debugpvec', [], _(b'A B'))
2481 @command(b'debugpvec', [], _(b'A B'))
2482 def debugpvec(ui, repo, a, b=None):
2482 def debugpvec(ui, repo, a, b=None):
2483 ca = scmutil.revsingle(repo, a)
2483 ca = scmutil.revsingle(repo, a)
2484 cb = scmutil.revsingle(repo, b)
2484 cb = scmutil.revsingle(repo, b)
2485 pa = pvec.ctxpvec(ca)
2485 pa = pvec.ctxpvec(ca)
2486 pb = pvec.ctxpvec(cb)
2486 pb = pvec.ctxpvec(cb)
2487 if pa == pb:
2487 if pa == pb:
2488 rel = b"="
2488 rel = b"="
2489 elif pa > pb:
2489 elif pa > pb:
2490 rel = b">"
2490 rel = b">"
2491 elif pa < pb:
2491 elif pa < pb:
2492 rel = b"<"
2492 rel = b"<"
2493 elif pa | pb:
2493 elif pa | pb:
2494 rel = b"|"
2494 rel = b"|"
2495 ui.write(_(b"a: %s\n") % pa)
2495 ui.write(_(b"a: %s\n") % pa)
2496 ui.write(_(b"b: %s\n") % pb)
2496 ui.write(_(b"b: %s\n") % pb)
2497 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2497 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2498 ui.write(
2498 ui.write(
2499 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
2499 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
2500 % (
2500 % (
2501 abs(pa._depth - pb._depth),
2501 abs(pa._depth - pb._depth),
2502 pvec._hamming(pa._vec, pb._vec),
2502 pvec._hamming(pa._vec, pb._vec),
2503 pa.distance(pb),
2503 pa.distance(pb),
2504 rel,
2504 rel,
2505 )
2505 )
2506 )
2506 )
2507
2507
2508
2508
2509 @command(
2509 @command(
2510 b'debugrebuilddirstate|debugrebuildstate',
2510 b'debugrebuilddirstate|debugrebuildstate',
2511 [
2511 [
2512 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
2512 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
2513 (
2513 (
2514 b'',
2514 b'',
2515 b'minimal',
2515 b'minimal',
2516 None,
2516 None,
2517 _(
2517 _(
2518 b'only rebuild files that are inconsistent with '
2518 b'only rebuild files that are inconsistent with '
2519 b'the working copy parent'
2519 b'the working copy parent'
2520 ),
2520 ),
2521 ),
2521 ),
2522 ],
2522 ],
2523 _(b'[-r REV]'),
2523 _(b'[-r REV]'),
2524 )
2524 )
2525 def debugrebuilddirstate(ui, repo, rev, **opts):
2525 def debugrebuilddirstate(ui, repo, rev, **opts):
2526 """rebuild the dirstate as it would look like for the given revision
2526 """rebuild the dirstate as it would look like for the given revision
2527
2527
2528 If no revision is specified the first current parent will be used.
2528 If no revision is specified the first current parent will be used.
2529
2529
2530 The dirstate will be set to the files of the given revision.
2530 The dirstate will be set to the files of the given revision.
2531 The actual working directory content or existing dirstate
2531 The actual working directory content or existing dirstate
2532 information such as adds or removes is not considered.
2532 information such as adds or removes is not considered.
2533
2533
2534 ``minimal`` will only rebuild the dirstate status for files that claim to be
2534 ``minimal`` will only rebuild the dirstate status for files that claim to be
2535 tracked but are not in the parent manifest, or that exist in the parent
2535 tracked but are not in the parent manifest, or that exist in the parent
2536 manifest but are not in the dirstate. It will not change adds, removes, or
2536 manifest but are not in the dirstate. It will not change adds, removes, or
2537 modified files that are in the working copy parent.
2537 modified files that are in the working copy parent.
2538
2538
2539 One use of this command is to make the next :hg:`status` invocation
2539 One use of this command is to make the next :hg:`status` invocation
2540 check the actual file content.
2540 check the actual file content.
2541 """
2541 """
2542 ctx = scmutil.revsingle(repo, rev)
2542 ctx = scmutil.revsingle(repo, rev)
2543 with repo.wlock():
2543 with repo.wlock():
2544 dirstate = repo.dirstate
2544 dirstate = repo.dirstate
2545 changedfiles = None
2545 changedfiles = None
2546 # See command doc for what minimal does.
2546 # See command doc for what minimal does.
2547 if opts.get('minimal'):
2547 if opts.get('minimal'):
2548 manifestfiles = set(ctx.manifest().keys())
2548 manifestfiles = set(ctx.manifest().keys())
2549 dirstatefiles = set(dirstate)
2549 dirstatefiles = set(dirstate)
2550 manifestonly = manifestfiles - dirstatefiles
2550 manifestonly = manifestfiles - dirstatefiles
2551 dsonly = dirstatefiles - manifestfiles
2551 dsonly = dirstatefiles - manifestfiles
2552 dsnotadded = set(f for f in dsonly if dirstate[f] != b'a')
2552 dsnotadded = set(f for f in dsonly if dirstate[f] != b'a')
2553 changedfiles = manifestonly | dsnotadded
2553 changedfiles = manifestonly | dsnotadded
2554
2554
2555 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2555 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
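# Illustrative usage sketch. Rebuild against the working directory parent, or
# use the cheaper --minimal variant described in the docstring:
#
#   $ hg debugrebuilddirstate -r .
#   $ hg debugrebuilddirstate --minimal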
2556
2556
2557
2557
2558 @command(b'debugrebuildfncache', [], b'')
2558 @command(b'debugrebuildfncache', [], b'')
2559 def debugrebuildfncache(ui, repo):
2559 def debugrebuildfncache(ui, repo):
2560 """rebuild the fncache file"""
2560 """rebuild the fncache file"""
2561 repair.rebuildfncache(ui, repo)
2561 repair.rebuildfncache(ui, repo)
2562
2562
2563
2563
2564 @command(
2564 @command(
2565 b'debugrename',
2565 b'debugrename',
2566 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2566 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2567 _(b'[-r REV] [FILE]...'),
2567 _(b'[-r REV] [FILE]...'),
2568 )
2568 )
2569 def debugrename(ui, repo, *pats, **opts):
2569 def debugrename(ui, repo, *pats, **opts):
2570 """dump rename information"""
2570 """dump rename information"""
2571
2571
2572 opts = pycompat.byteskwargs(opts)
2572 opts = pycompat.byteskwargs(opts)
2573 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2573 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2574 m = scmutil.match(ctx, pats, opts)
2574 m = scmutil.match(ctx, pats, opts)
2575 for abs in ctx.walk(m):
2575 for abs in ctx.walk(m):
2576 fctx = ctx[abs]
2576 fctx = ctx[abs]
2577 o = fctx.filelog().renamed(fctx.filenode())
2577 o = fctx.filelog().renamed(fctx.filenode())
2578 rel = repo.pathto(abs)
2578 rel = repo.pathto(abs)
2579 if o:
2579 if o:
2580 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2580 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2581 else:
2581 else:
2582 ui.write(_(b"%s not renamed\n") % rel)
2582 ui.write(_(b"%s not renamed\n") % rel)
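# Illustrative usage sketch (FILE and REV are placeholders). For each matched
# file this prints either "FILE renamed from SOURCE:HASH" or "FILE not renamed":
#
#   $ hg debugrename FILE
#   $ hg debugrename -r REV FILE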
2583
2583
2584
2584
2585 @command(
2585 @command(
2586 b'debugrevlog',
2586 b'debugrevlog',
2587 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
2587 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
2588 _(b'-c|-m|FILE'),
2588 _(b'-c|-m|FILE'),
2589 optionalrepo=True,
2589 optionalrepo=True,
2590 )
2590 )
2591 def debugrevlog(ui, repo, file_=None, **opts):
2591 def debugrevlog(ui, repo, file_=None, **opts):
2592 """show data and statistics about a revlog"""
2592 """show data and statistics about a revlog"""
2593 opts = pycompat.byteskwargs(opts)
2593 opts = pycompat.byteskwargs(opts)
2594 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
2594 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
2595
2595
2596 if opts.get(b"dump"):
2596 if opts.get(b"dump"):
2597 numrevs = len(r)
2597 numrevs = len(r)
2598 ui.write(
2598 ui.write(
2599 (
2599 (
2600 b"# rev p1rev p2rev start end deltastart base p1 p2"
2600 b"# rev p1rev p2rev start end deltastart base p1 p2"
2601 b" rawsize totalsize compression heads chainlen\n"
2601 b" rawsize totalsize compression heads chainlen\n"
2602 )
2602 )
2603 )
2603 )
2604 ts = 0
2604 ts = 0
2605 heads = set()
2605 heads = set()
2606
2606
2607 for rev in pycompat.xrange(numrevs):
2607 for rev in pycompat.xrange(numrevs):
2608 dbase = r.deltaparent(rev)
2608 dbase = r.deltaparent(rev)
2609 if dbase == -1:
2609 if dbase == -1:
2610 dbase = rev
2610 dbase = rev
2611 cbase = r.chainbase(rev)
2611 cbase = r.chainbase(rev)
2612 clen = r.chainlen(rev)
2612 clen = r.chainlen(rev)
2613 p1, p2 = r.parentrevs(rev)
2613 p1, p2 = r.parentrevs(rev)
2614 rs = r.rawsize(rev)
2614 rs = r.rawsize(rev)
2615 ts = ts + rs
2615 ts = ts + rs
2616 heads -= set(r.parentrevs(rev))
2616 heads -= set(r.parentrevs(rev))
2617 heads.add(rev)
2617 heads.add(rev)
2618 try:
2618 try:
2619 compression = ts / r.end(rev)
2619 compression = ts / r.end(rev)
2620 except ZeroDivisionError:
2620 except ZeroDivisionError:
2621 compression = 0
2621 compression = 0
2622 ui.write(
2622 ui.write(
2623 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2623 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2624 b"%11d %5d %8d\n"
2624 b"%11d %5d %8d\n"
2625 % (
2625 % (
2626 rev,
2626 rev,
2627 p1,
2627 p1,
2628 p2,
2628 p2,
2629 r.start(rev),
2629 r.start(rev),
2630 r.end(rev),
2630 r.end(rev),
2631 r.start(dbase),
2631 r.start(dbase),
2632 r.start(cbase),
2632 r.start(cbase),
2633 r.start(p1),
2633 r.start(p1),
2634 r.start(p2),
2634 r.start(p2),
2635 rs,
2635 rs,
2636 ts,
2636 ts,
2637 compression,
2637 compression,
2638 len(heads),
2638 len(heads),
2639 clen,
2639 clen,
2640 )
2640 )
2641 )
2641 )
2642 return 0
2642 return 0
2643
2643
2644 v = r.version
2644 v = r.version
2645 format = v & 0xFFFF
2645 format = v & 0xFFFF
2646 flags = []
2646 flags = []
2647 gdelta = False
2647 gdelta = False
2648 if v & revlog.FLAG_INLINE_DATA:
2648 if v & revlog.FLAG_INLINE_DATA:
2649 flags.append(b'inline')
2649 flags.append(b'inline')
2650 if v & revlog.FLAG_GENERALDELTA:
2650 if v & revlog.FLAG_GENERALDELTA:
2651 gdelta = True
2651 gdelta = True
2652 flags.append(b'generaldelta')
2652 flags.append(b'generaldelta')
2653 if not flags:
2653 if not flags:
2654 flags = [b'(none)']
2654 flags = [b'(none)']
2655
2655
2656 ### tracks merge vs single parent
2656 ### tracks merge vs single parent
2657 nummerges = 0
2657 nummerges = 0
2658
2658
2659 ### tracks the ways the deltas are built
2660 # nodelta
2660 # nodelta
2661 numempty = 0
2661 numempty = 0
2662 numemptytext = 0
2662 numemptytext = 0
2663 numemptydelta = 0
2663 numemptydelta = 0
2664 # full file content
2664 # full file content
2665 numfull = 0
2665 numfull = 0
2666 # intermediate snapshot against a prior snapshot
2666 # intermediate snapshot against a prior snapshot
2667 numsemi = 0
2667 numsemi = 0
2668 # snapshot count per depth
2668 # snapshot count per depth
2669 numsnapdepth = collections.defaultdict(lambda: 0)
2669 numsnapdepth = collections.defaultdict(lambda: 0)
2670 # delta against previous revision
2670 # delta against previous revision
2671 numprev = 0
2671 numprev = 0
2672 # delta against first or second parent (not prev)
2672 # delta against first or second parent (not prev)
2673 nump1 = 0
2673 nump1 = 0
2674 nump2 = 0
2674 nump2 = 0
2675 # delta against neither prev nor parents
2675 # delta against neither prev nor parents
2676 numother = 0
2676 numother = 0
2677 # delta against prev that are also first or second parent
2677 # delta against prev that are also first or second parent
2678 # (details of `numprev`)
2678 # (details of `numprev`)
2679 nump1prev = 0
2679 nump1prev = 0
2680 nump2prev = 0
2680 nump2prev = 0
2681
2681
2682 # data about the delta chain of each rev
2683 chainlengths = []
2683 chainlengths = []
2684 chainbases = []
2684 chainbases = []
2685 chainspans = []
2685 chainspans = []
2686
2686
2687 # data about each revision
2687 # data about each revision
2688 datasize = [None, 0, 0]
2688 datasize = [None, 0, 0]
2689 fullsize = [None, 0, 0]
2689 fullsize = [None, 0, 0]
2690 semisize = [None, 0, 0]
2690 semisize = [None, 0, 0]
2691 # snapshot count per depth
2691 # snapshot count per depth
2692 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2692 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2693 deltasize = [None, 0, 0]
2693 deltasize = [None, 0, 0]
2694 chunktypecounts = {}
2694 chunktypecounts = {}
2695 chunktypesizes = {}
2695 chunktypesizes = {}
2696
2696
2697 def addsize(size, l):
2697 def addsize(size, l):
2698 if l[0] is None or size < l[0]:
2698 if l[0] is None or size < l[0]:
2699 l[0] = size
2699 l[0] = size
2700 if size > l[1]:
2700 if size > l[1]:
2701 l[1] = size
2701 l[1] = size
2702 l[2] += size
2702 l[2] += size
2703
2703
2704 numrevs = len(r)
2704 numrevs = len(r)
2705 for rev in pycompat.xrange(numrevs):
2705 for rev in pycompat.xrange(numrevs):
2706 p1, p2 = r.parentrevs(rev)
2706 p1, p2 = r.parentrevs(rev)
2707 delta = r.deltaparent(rev)
2707 delta = r.deltaparent(rev)
2708 if format > 0:
2708 if format > 0:
2709 addsize(r.rawsize(rev), datasize)
2709 addsize(r.rawsize(rev), datasize)
2710 if p2 != nullrev:
2710 if p2 != nullrev:
2711 nummerges += 1
2711 nummerges += 1
2712 size = r.length(rev)
2712 size = r.length(rev)
2713 if delta == nullrev:
2713 if delta == nullrev:
2714 chainlengths.append(0)
2714 chainlengths.append(0)
2715 chainbases.append(r.start(rev))
2715 chainbases.append(r.start(rev))
2716 chainspans.append(size)
2716 chainspans.append(size)
2717 if size == 0:
2717 if size == 0:
2718 numempty += 1
2718 numempty += 1
2719 numemptytext += 1
2719 numemptytext += 1
2720 else:
2720 else:
2721 numfull += 1
2721 numfull += 1
2722 numsnapdepth[0] += 1
2722 numsnapdepth[0] += 1
2723 addsize(size, fullsize)
2723 addsize(size, fullsize)
2724 addsize(size, snapsizedepth[0])
2724 addsize(size, snapsizedepth[0])
2725 else:
2725 else:
2726 chainlengths.append(chainlengths[delta] + 1)
2726 chainlengths.append(chainlengths[delta] + 1)
2727 baseaddr = chainbases[delta]
2727 baseaddr = chainbases[delta]
2728 revaddr = r.start(rev)
2728 revaddr = r.start(rev)
2729 chainbases.append(baseaddr)
2729 chainbases.append(baseaddr)
2730 chainspans.append((revaddr - baseaddr) + size)
2730 chainspans.append((revaddr - baseaddr) + size)
2731 if size == 0:
2731 if size == 0:
2732 numempty += 1
2732 numempty += 1
2733 numemptydelta += 1
2733 numemptydelta += 1
2734 elif r.issnapshot(rev):
2734 elif r.issnapshot(rev):
2735 addsize(size, semisize)
2735 addsize(size, semisize)
2736 numsemi += 1
2736 numsemi += 1
2737 depth = r.snapshotdepth(rev)
2737 depth = r.snapshotdepth(rev)
2738 numsnapdepth[depth] += 1
2738 numsnapdepth[depth] += 1
2739 addsize(size, snapsizedepth[depth])
2739 addsize(size, snapsizedepth[depth])
2740 else:
2740 else:
2741 addsize(size, deltasize)
2741 addsize(size, deltasize)
2742 if delta == rev - 1:
2742 if delta == rev - 1:
2743 numprev += 1
2743 numprev += 1
2744 if delta == p1:
2744 if delta == p1:
2745 nump1prev += 1
2745 nump1prev += 1
2746 elif delta == p2:
2746 elif delta == p2:
2747 nump2prev += 1
2747 nump2prev += 1
2748 elif delta == p1:
2748 elif delta == p1:
2749 nump1 += 1
2749 nump1 += 1
2750 elif delta == p2:
2750 elif delta == p2:
2751 nump2 += 1
2751 nump2 += 1
2752 elif delta != nullrev:
2752 elif delta != nullrev:
2753 numother += 1
2753 numother += 1
2754
2754
2755 # Obtain data on the raw chunks in the revlog.
2755 # Obtain data on the raw chunks in the revlog.
2756 if util.safehasattr(r, b'_getsegmentforrevs'):
2756 if util.safehasattr(r, b'_getsegmentforrevs'):
2757 segment = r._getsegmentforrevs(rev, rev)[1]
2757 segment = r._getsegmentforrevs(rev, rev)[1]
2758 else:
2758 else:
2759 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2759 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2760 if segment:
2760 if segment:
2761 chunktype = bytes(segment[0:1])
2761 chunktype = bytes(segment[0:1])
2762 else:
2762 else:
2763 chunktype = b'empty'
2763 chunktype = b'empty'
2764
2764
2765 if chunktype not in chunktypecounts:
2765 if chunktype not in chunktypecounts:
2766 chunktypecounts[chunktype] = 0
2766 chunktypecounts[chunktype] = 0
2767 chunktypesizes[chunktype] = 0
2767 chunktypesizes[chunktype] = 0
2768
2768
2769 chunktypecounts[chunktype] += 1
2769 chunktypecounts[chunktype] += 1
2770 chunktypesizes[chunktype] += size
2770 chunktypesizes[chunktype] += size
2771
2771
2772 # Adjust size min value for empty cases
2772 # Adjust size min value for empty cases
2773 for size in (datasize, fullsize, semisize, deltasize):
2773 for size in (datasize, fullsize, semisize, deltasize):
2774 if size[0] is None:
2774 if size[0] is None:
2775 size[0] = 0
2775 size[0] = 0
2776
2776
2777 numdeltas = numrevs - numfull - numempty - numsemi
2777 numdeltas = numrevs - numfull - numempty - numsemi
2778 numoprev = numprev - nump1prev - nump2prev
2778 numoprev = numprev - nump1prev - nump2prev
2779 totalrawsize = datasize[2]
2779 totalrawsize = datasize[2]
2780 datasize[2] /= numrevs
2780 datasize[2] /= numrevs
2781 fulltotal = fullsize[2]
2781 fulltotal = fullsize[2]
2782 if numfull == 0:
2782 if numfull == 0:
2783 fullsize[2] = 0
2783 fullsize[2] = 0
2784 else:
2784 else:
2785 fullsize[2] /= numfull
2785 fullsize[2] /= numfull
2786 semitotal = semisize[2]
2786 semitotal = semisize[2]
2787 snaptotal = {}
2787 snaptotal = {}
2788 if numsemi > 0:
2788 if numsemi > 0:
2789 semisize[2] /= numsemi
2789 semisize[2] /= numsemi
2790 for depth in snapsizedepth:
2790 for depth in snapsizedepth:
2791 snaptotal[depth] = snapsizedepth[depth][2]
2791 snaptotal[depth] = snapsizedepth[depth][2]
2792 snapsizedepth[depth][2] /= numsnapdepth[depth]
2792 snapsizedepth[depth][2] /= numsnapdepth[depth]
2793
2793
2794 deltatotal = deltasize[2]
2794 deltatotal = deltasize[2]
2795 if numdeltas > 0:
2795 if numdeltas > 0:
2796 deltasize[2] /= numdeltas
2796 deltasize[2] /= numdeltas
2797 totalsize = fulltotal + semitotal + deltatotal
2797 totalsize = fulltotal + semitotal + deltatotal
2798 avgchainlen = sum(chainlengths) / numrevs
2798 avgchainlen = sum(chainlengths) / numrevs
2799 maxchainlen = max(chainlengths)
2799 maxchainlen = max(chainlengths)
2800 maxchainspan = max(chainspans)
2800 maxchainspan = max(chainspans)
2801 compratio = 1
2801 compratio = 1
2802 if totalsize:
2802 if totalsize:
2803 compratio = totalrawsize / totalsize
2803 compratio = totalrawsize / totalsize
2804
2804
2805 basedfmtstr = b'%%%dd\n'
2805 basedfmtstr = b'%%%dd\n'
2806 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
2806 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
2807
2807
2808 def dfmtstr(max):
2808 def dfmtstr(max):
2809 return basedfmtstr % len(str(max))
2809 return basedfmtstr % len(str(max))
2810
2810
2811 def pcfmtstr(max, padding=0):
2811 def pcfmtstr(max, padding=0):
2812 return basepcfmtstr % (len(str(max)), b' ' * padding)
2812 return basepcfmtstr % (len(str(max)), b' ' * padding)
2813
2813
2814 def pcfmt(value, total):
2814 def pcfmt(value, total):
2815 if total:
2815 if total:
2816 return (value, 100 * float(value) / total)
2816 return (value, 100 * float(value) / total)
2817 else:
2817 else:
2818 return value, 100.0
2818 return value, 100.0
2819
2819
2820 ui.writenoi18n(b'format : %d\n' % format)
2820 ui.writenoi18n(b'format : %d\n' % format)
2821 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
2821 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
2822
2822
2823 ui.write(b'\n')
2823 ui.write(b'\n')
2824 fmt = pcfmtstr(totalsize)
2824 fmt = pcfmtstr(totalsize)
2825 fmt2 = dfmtstr(totalsize)
2825 fmt2 = dfmtstr(totalsize)
2826 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2826 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2827 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
2827 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
2828 ui.writenoi18n(
2828 ui.writenoi18n(
2829 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
2829 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
2830 )
2830 )
2831 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2831 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2832 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
2832 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
2833 ui.writenoi18n(
2833 ui.writenoi18n(
2834 b' text : '
2834 b' text : '
2835 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
2835 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
2836 )
2836 )
2837 ui.writenoi18n(
2837 ui.writenoi18n(
2838 b' delta : '
2838 b' delta : '
2839 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
2839 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
2840 )
2840 )
2841 ui.writenoi18n(
2841 ui.writenoi18n(
2842 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
2842 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
2843 )
2843 )
2844 for depth in sorted(numsnapdepth):
2844 for depth in sorted(numsnapdepth):
2845 ui.write(
2845 ui.write(
2846 (b' lvl-%-3d : ' % depth)
2846 (b' lvl-%-3d : ' % depth)
2847 + fmt % pcfmt(numsnapdepth[depth], numrevs)
2847 + fmt % pcfmt(numsnapdepth[depth], numrevs)
2848 )
2848 )
2849 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
2849 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
2850 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
2850 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
2851 ui.writenoi18n(
2851 ui.writenoi18n(
2852 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
2852 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
2853 )
2853 )
2854 for depth in sorted(numsnapdepth):
2854 for depth in sorted(numsnapdepth):
2855 ui.write(
2855 ui.write(
2856 (b' lvl-%-3d : ' % depth)
2856 (b' lvl-%-3d : ' % depth)
2857 + fmt % pcfmt(snaptotal[depth], totalsize)
2857 + fmt % pcfmt(snaptotal[depth], totalsize)
2858 )
2858 )
2859 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
2859 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
2860
2860
2861 def fmtchunktype(chunktype):
2861 def fmtchunktype(chunktype):
2862 if chunktype == b'empty':
2862 if chunktype == b'empty':
2863 return b' %s : ' % chunktype
2863 return b' %s : ' % chunktype
2864 elif chunktype in pycompat.bytestr(string.ascii_letters):
2864 elif chunktype in pycompat.bytestr(string.ascii_letters):
2865 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2865 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2866 else:
2866 else:
2867 return b' 0x%s : ' % hex(chunktype)
2867 return b' 0x%s : ' % hex(chunktype)
2868
2868
2869 ui.write(b'\n')
2869 ui.write(b'\n')
2870 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
2870 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
2871 for chunktype in sorted(chunktypecounts):
2871 for chunktype in sorted(chunktypecounts):
2872 ui.write(fmtchunktype(chunktype))
2872 ui.write(fmtchunktype(chunktype))
2873 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2873 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2874 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
2874 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
2875 for chunktype in sorted(chunktypecounts):
2875 for chunktype in sorted(chunktypecounts):
2876 ui.write(fmtchunktype(chunktype))
2876 ui.write(fmtchunktype(chunktype))
2877 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2877 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2878
2878
2879 ui.write(b'\n')
2879 ui.write(b'\n')
2880 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2880 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2881 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
2881 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
2882 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
2882 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
2883 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
2883 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
2884 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
2884 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
2885
2885
2886 if format > 0:
2886 if format > 0:
2887 ui.write(b'\n')
2887 ui.write(b'\n')
2888 ui.writenoi18n(
2888 ui.writenoi18n(
2889 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
2889 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
2890 % tuple(datasize)
2890 % tuple(datasize)
2891 )
2891 )
2892 ui.writenoi18n(
2892 ui.writenoi18n(
2893 b'full revision size (min/max/avg) : %d / %d / %d\n'
2893 b'full revision size (min/max/avg) : %d / %d / %d\n'
2894 % tuple(fullsize)
2894 % tuple(fullsize)
2895 )
2895 )
2896 ui.writenoi18n(
2896 ui.writenoi18n(
2897 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
2897 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
2898 % tuple(semisize)
2898 % tuple(semisize)
2899 )
2899 )
2900 for depth in sorted(snapsizedepth):
2900 for depth in sorted(snapsizedepth):
2901 if depth == 0:
2901 if depth == 0:
2902 continue
2902 continue
2903 ui.writenoi18n(
2903 ui.writenoi18n(
2904 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
2904 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
2905 % ((depth,) + tuple(snapsizedepth[depth]))
2905 % ((depth,) + tuple(snapsizedepth[depth]))
2906 )
2906 )
2907 ui.writenoi18n(
2907 ui.writenoi18n(
2908 b'delta size (min/max/avg) : %d / %d / %d\n'
2908 b'delta size (min/max/avg) : %d / %d / %d\n'
2909 % tuple(deltasize)
2909 % tuple(deltasize)
2910 )
2910 )
2911
2911
2912 if numdeltas > 0:
2912 if numdeltas > 0:
2913 ui.write(b'\n')
2913 ui.write(b'\n')
2914 fmt = pcfmtstr(numdeltas)
2914 fmt = pcfmtstr(numdeltas)
2915 fmt2 = pcfmtstr(numdeltas, 4)
2915 fmt2 = pcfmtstr(numdeltas, 4)
2916 ui.writenoi18n(
2916 ui.writenoi18n(
2917 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
2917 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
2918 )
2918 )
2919 if numprev > 0:
2919 if numprev > 0:
2920 ui.writenoi18n(
2920 ui.writenoi18n(
2921 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
2921 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
2922 )
2922 )
2923 ui.writenoi18n(
2923 ui.writenoi18n(
2924 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
2924 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
2925 )
2925 )
2926 ui.writenoi18n(
2926 ui.writenoi18n(
2927 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
2927 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
2928 )
2928 )
2929 if gdelta:
2929 if gdelta:
2930 ui.writenoi18n(
2930 ui.writenoi18n(
2931 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
2931 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
2932 )
2932 )
2933 ui.writenoi18n(
2933 ui.writenoi18n(
2934 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
2934 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
2935 )
2935 )
2936 ui.writenoi18n(
2936 ui.writenoi18n(
2937 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
2937 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
2938 )
2938 )
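# Illustrative usage sketch (FILE is a placeholder). The statistics above can
# be computed for the changelog (-c), the manifest (-m) or a file revlog, and
# -d/--dump emits the raw per-revision table instead:
#
#   $ hg debugrevlog -c
#   $ hg debugrevlog -m
#   $ hg debugrevlog --dump FILE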
2939
2939
2940
2940
2941 @command(
2941 @command(
2942 b'debugrevlogindex',
2942 b'debugrevlogindex',
2943 cmdutil.debugrevlogopts
2943 cmdutil.debugrevlogopts
2944 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
2944 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
2945 _(b'[-f FORMAT] -c|-m|FILE'),
2945 _(b'[-f FORMAT] -c|-m|FILE'),
2946 optionalrepo=True,
2946 optionalrepo=True,
2947 )
2947 )
2948 def debugrevlogindex(ui, repo, file_=None, **opts):
2948 def debugrevlogindex(ui, repo, file_=None, **opts):
2949 """dump the contents of a revlog index"""
2949 """dump the contents of a revlog index"""
2950 opts = pycompat.byteskwargs(opts)
2950 opts = pycompat.byteskwargs(opts)
2951 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
2951 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
2952 format = opts.get(b'format', 0)
2952 format = opts.get(b'format', 0)
2953 if format not in (0, 1):
2953 if format not in (0, 1):
2954 raise error.Abort(_(b"unknown format %d") % format)
2954 raise error.Abort(_(b"unknown format %d") % format)
2955
2955
2956 if ui.debugflag:
2956 if ui.debugflag:
2957 shortfn = hex
2957 shortfn = hex
2958 else:
2958 else:
2959 shortfn = short
2959 shortfn = short
2960
2960
2961 # There might not be anything in r, so have a sane default
2961 # There might not be anything in r, so have a sane default
2962 idlen = 12
2962 idlen = 12
2963 for i in r:
2963 for i in r:
2964 idlen = len(shortfn(r.node(i)))
2964 idlen = len(shortfn(r.node(i)))
2965 break
2965 break
2966
2966
2967 if format == 0:
2967 if format == 0:
2968 if ui.verbose:
2968 if ui.verbose:
2969 ui.writenoi18n(
2969 ui.writenoi18n(
2970 b" rev offset length linkrev %s %s p2\n"
2970 b" rev offset length linkrev %s %s p2\n"
2971 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
2971 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
2972 )
2972 )
2973 else:
2973 else:
2974 ui.writenoi18n(
2974 ui.writenoi18n(
2975 b" rev linkrev %s %s p2\n"
2975 b" rev linkrev %s %s p2\n"
2976 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
2976 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
2977 )
2977 )
2978 elif format == 1:
2978 elif format == 1:
2979 if ui.verbose:
2979 if ui.verbose:
2980 ui.writenoi18n(
2980 ui.writenoi18n(
2981 (
2981 (
2982 b" rev flag offset length size link p1"
2982 b" rev flag offset length size link p1"
2983 b" p2 %s\n"
2983 b" p2 %s\n"
2984 )
2984 )
2985 % b"nodeid".rjust(idlen)
2985 % b"nodeid".rjust(idlen)
2986 )
2986 )
2987 else:
2987 else:
2988 ui.writenoi18n(
2988 ui.writenoi18n(
2989 b" rev flag size link p1 p2 %s\n"
2989 b" rev flag size link p1 p2 %s\n"
2990 % b"nodeid".rjust(idlen)
2990 % b"nodeid".rjust(idlen)
2991 )
2991 )
2992
2992
2993 for i in r:
2993 for i in r:
2994 node = r.node(i)
2994 node = r.node(i)
2995 if format == 0:
2995 if format == 0:
2996 try:
2996 try:
2997 pp = r.parents(node)
2997 pp = r.parents(node)
2998 except Exception:
2998 except Exception:
2999 pp = [nullid, nullid]
2999 pp = [nullid, nullid]
3000 if ui.verbose:
3000 if ui.verbose:
3001 ui.write(
3001 ui.write(
3002 b"% 6d % 9d % 7d % 7d %s %s %s\n"
3002 b"% 6d % 9d % 7d % 7d %s %s %s\n"
3003 % (
3003 % (
3004 i,
3004 i,
3005 r.start(i),
3005 r.start(i),
3006 r.length(i),
3006 r.length(i),
3007 r.linkrev(i),
3007 r.linkrev(i),
3008 shortfn(node),
3008 shortfn(node),
3009 shortfn(pp[0]),
3009 shortfn(pp[0]),
3010 shortfn(pp[1]),
3010 shortfn(pp[1]),
3011 )
3011 )
3012 )
3012 )
3013 else:
3013 else:
3014 ui.write(
3014 ui.write(
3015 b"% 6d % 7d %s %s %s\n"
3015 b"% 6d % 7d %s %s %s\n"
3016 % (
3016 % (
3017 i,
3017 i,
3018 r.linkrev(i),
3018 r.linkrev(i),
3019 shortfn(node),
3019 shortfn(node),
3020 shortfn(pp[0]),
3020 shortfn(pp[0]),
3021 shortfn(pp[1]),
3021 shortfn(pp[1]),
3022 )
3022 )
3023 )
3023 )
3024 elif format == 1:
3024 elif format == 1:
3025 pr = r.parentrevs(i)
3025 pr = r.parentrevs(i)
3026 if ui.verbose:
3026 if ui.verbose:
3027 ui.write(
3027 ui.write(
3028 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3028 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3029 % (
3029 % (
3030 i,
3030 i,
3031 r.flags(i),
3031 r.flags(i),
3032 r.start(i),
3032 r.start(i),
3033 r.length(i),
3033 r.length(i),
3034 r.rawsize(i),
3034 r.rawsize(i),
3035 r.linkrev(i),
3035 r.linkrev(i),
3036 pr[0],
3036 pr[0],
3037 pr[1],
3037 pr[1],
3038 shortfn(node),
3038 shortfn(node),
3039 )
3039 )
3040 )
3040 )
3041 else:
3041 else:
3042 ui.write(
3042 ui.write(
3043 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3043 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3044 % (
3044 % (
3045 i,
3045 i,
3046 r.flags(i),
3046 r.flags(i),
3047 r.rawsize(i),
3047 r.rawsize(i),
3048 r.linkrev(i),
3048 r.linkrev(i),
3049 pr[0],
3049 pr[0],
3050 pr[1],
3050 pr[1],
3051 shortfn(node),
3051 shortfn(node),
3052 )
3052 )
3053 )
3053 )
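The two formats above are just different projections of the same per-revision index fields. A minimal sketch (hypothetical helper, not part of this module) pulling those fields through the same accessors used above:

def _indexsummary(r, shortfn=short):
    # Yield the same fields, in the same order, as a format-1 non-verbose row:
    # rev, flags, rawsize, linkrev, p1, p2, nodeid.
    for i in r:
        p1, p2 = r.parentrevs(i)
        yield (i, r.flags(i), r.rawsize(i), r.linkrev(i), p1, p2,
               shortfn(r.node(i)))

# e.g. fed with the revlog returned by cmdutil.openrevlog(...) above.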
3054
3054
3055
3055
3056 @command(
3056 @command(
3057 b'debugrevspec',
3057 b'debugrevspec',
3058 [
3058 [
3059 (
3059 (
3060 b'',
3060 b'',
3061 b'optimize',
3061 b'optimize',
3062 None,
3062 None,
3063 _(b'print parsed tree after optimizing (DEPRECATED)'),
3063 _(b'print parsed tree after optimizing (DEPRECATED)'),
3064 ),
3064 ),
3065 (
3065 (
3066 b'',
3066 b'',
3067 b'show-revs',
3067 b'show-revs',
3068 True,
3068 True,
3069 _(b'print list of result revisions (default)'),
3069 _(b'print list of result revisions (default)'),
3070 ),
3070 ),
3071 (
3071 (
3072 b's',
3072 b's',
3073 b'show-set',
3073 b'show-set',
3074 None,
3074 None,
3075 _(b'print internal representation of result set'),
3075 _(b'print internal representation of result set'),
3076 ),
3076 ),
3077 (
3077 (
3078 b'p',
3078 b'p',
3079 b'show-stage',
3079 b'show-stage',
3080 [],
3080 [],
3081 _(b'print parsed tree at the given stage'),
3081 _(b'print parsed tree at the given stage'),
3082 _(b'NAME'),
3082 _(b'NAME'),
3083 ),
3083 ),
3084 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3084 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3085 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3085 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3086 ],
3086 ],
3087 b'REVSPEC',
3087 b'REVSPEC',
3088 )
3088 )
3089 def debugrevspec(ui, repo, expr, **opts):
3089 def debugrevspec(ui, repo, expr, **opts):
3090 """parse and apply a revision specification
3090 """parse and apply a revision specification
3091
3091
3092 Use -p/--show-stage option to print the parsed tree at the given stages.
3092 Use -p/--show-stage option to print the parsed tree at the given stages.
3093 Use -p all to print tree at every stage.
3093 Use -p all to print tree at every stage.
3094
3094
3095 Use --no-show-revs option with -s or -p to print only the set
3095 Use --no-show-revs option with -s or -p to print only the set
3096 representation or the parsed tree respectively.
3096 representation or the parsed tree respectively.
3097
3097
3098 Use --verify-optimized to compare the optimized result with the unoptimized
3098 Use --verify-optimized to compare the optimized result with the unoptimized
3099 one. Returns 1 if the optimized result differs.
3099 one. Returns 1 if the optimized result differs.
3100 """
3100 """
3101 opts = pycompat.byteskwargs(opts)
3101 opts = pycompat.byteskwargs(opts)
3102 aliases = ui.configitems(b'revsetalias')
3102 aliases = ui.configitems(b'revsetalias')
3103 stages = [
3103 stages = [
3104 (b'parsed', lambda tree: tree),
3104 (b'parsed', lambda tree: tree),
3105 (
3105 (
3106 b'expanded',
3106 b'expanded',
3107 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3107 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3108 ),
3108 ),
3109 (b'concatenated', revsetlang.foldconcat),
3109 (b'concatenated', revsetlang.foldconcat),
3110 (b'analyzed', revsetlang.analyze),
3110 (b'analyzed', revsetlang.analyze),
3111 (b'optimized', revsetlang.optimize),
3111 (b'optimized', revsetlang.optimize),
3112 ]
3112 ]
3113 if opts[b'no_optimized']:
3113 if opts[b'no_optimized']:
3114 stages = stages[:-1]
3114 stages = stages[:-1]
3115 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3115 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3116 raise error.Abort(
3116 raise error.Abort(
3117 _(b'cannot use --verify-optimized with --no-optimized')
3117 _(b'cannot use --verify-optimized with --no-optimized')
3118 )
3118 )
3119 stagenames = set(n for n, f in stages)
3119 stagenames = set(n for n, f in stages)
3120
3120
3121 showalways = set()
3121 showalways = set()
3122 showchanged = set()
3122 showchanged = set()
3123 if ui.verbose and not opts[b'show_stage']:
3123 if ui.verbose and not opts[b'show_stage']:
3124 # show parsed tree by --verbose (deprecated)
3124 # show parsed tree by --verbose (deprecated)
3125 showalways.add(b'parsed')
3125 showalways.add(b'parsed')
3126 showchanged.update([b'expanded', b'concatenated'])
3126 showchanged.update([b'expanded', b'concatenated'])
3127 if opts[b'optimize']:
3127 if opts[b'optimize']:
3128 showalways.add(b'optimized')
3128 showalways.add(b'optimized')
3129 if opts[b'show_stage'] and opts[b'optimize']:
3129 if opts[b'show_stage'] and opts[b'optimize']:
3130 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3130 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3131 if opts[b'show_stage'] == [b'all']:
3131 if opts[b'show_stage'] == [b'all']:
3132 showalways.update(stagenames)
3132 showalways.update(stagenames)
3133 else:
3133 else:
3134 for n in opts[b'show_stage']:
3134 for n in opts[b'show_stage']:
3135 if n not in stagenames:
3135 if n not in stagenames:
3136 raise error.Abort(_(b'invalid stage name: %s') % n)
3136 raise error.Abort(_(b'invalid stage name: %s') % n)
3137 showalways.update(opts[b'show_stage'])
3137 showalways.update(opts[b'show_stage'])
3138
3138
3139 treebystage = {}
3139 treebystage = {}
3140 printedtree = None
3140 printedtree = None
3141 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3141 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3142 for n, f in stages:
3142 for n, f in stages:
3143 treebystage[n] = tree = f(tree)
3143 treebystage[n] = tree = f(tree)
3144 if n in showalways or (n in showchanged and tree != printedtree):
3144 if n in showalways or (n in showchanged and tree != printedtree):
3145 if opts[b'show_stage'] or n != b'parsed':
3145 if opts[b'show_stage'] or n != b'parsed':
3146 ui.write(b"* %s:\n" % n)
3146 ui.write(b"* %s:\n" % n)
3147 ui.write(revsetlang.prettyformat(tree), b"\n")
3147 ui.write(revsetlang.prettyformat(tree), b"\n")
3148 printedtree = tree
3148 printedtree = tree
3149
3149
3150 if opts[b'verify_optimized']:
3150 if opts[b'verify_optimized']:
3151 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3151 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3152 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3152 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3153 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3153 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3154 ui.writenoi18n(
3154 ui.writenoi18n(
3155 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3155 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3156 )
3156 )
3157 ui.writenoi18n(
3157 ui.writenoi18n(
3158 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3158 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3159 )
3159 )
3160 arevs = list(arevs)
3160 arevs = list(arevs)
3161 brevs = list(brevs)
3161 brevs = list(brevs)
3162 if arevs == brevs:
3162 if arevs == brevs:
3163 return 0
3163 return 0
3164 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3164 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3165 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3165 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3166 sm = difflib.SequenceMatcher(None, arevs, brevs)
3166 sm = difflib.SequenceMatcher(None, arevs, brevs)
3167 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3167 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3168 if tag in ('delete', 'replace'):
3168 if tag in ('delete', 'replace'):
3169 for c in arevs[alo:ahi]:
3169 for c in arevs[alo:ahi]:
3170 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3170 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3171 if tag in ('insert', 'replace'):
3171 if tag in ('insert', 'replace'):
3172 for c in brevs[blo:bhi]:
3172 for c in brevs[blo:bhi]:
3173 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3173 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3174 if tag == 'equal':
3174 if tag == 'equal':
3175 for c in arevs[alo:ahi]:
3175 for c in arevs[alo:ahi]:
3176 ui.write(b' %d\n' % c)
3176 ui.write(b' %d\n' % c)
3177 return 1
3177 return 1
3178
3178
3179 func = revset.makematcher(tree)
3179 func = revset.makematcher(tree)
3180 revs = func(repo)
3180 revs = func(repo)
3181 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3181 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3182 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3182 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3183 if not opts[b'show_revs']:
3183 if not opts[b'show_revs']:
3184 return
3184 return
3185 for c in revs:
3185 for c in revs:
3186 ui.write(b"%d\n" % c)
3186 ui.write(b"%d\n" % c)
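The stage table near the top of this command is a fixed pipeline; the options only control how much of it is displayed or verified. A minimal sketch (hypothetical helper, not part of this module) running the same stages end to end with the functions referenced above:

def _evalrevspec(ui, repo, expr):
    # parse -> expand aliases -> fold concatenation -> analyze -> optimize,
    # then build a matcher and evaluate it against the repo.
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    aliases = ui.configitems(b'revsetalias')
    tree = revsetlang.expandaliases(tree, aliases, ui.warn)
    tree = revsetlang.foldconcat(tree)
    tree = revsetlang.analyze(tree)
    tree = revsetlang.optimize(tree)
    return revset.makematcher(tree)(repo)

# e.g. list(_evalrevspec(ui, repo, b'heads(all())')) -> list of revision numbers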
3187
3187
3188
3188
3189 @command(
3189 @command(
3190 b'debugserve',
3190 b'debugserve',
3191 [
3191 [
3192 (
3192 (
3193 b'',
3193 b'',
3194 b'sshstdio',
3194 b'sshstdio',
3195 False,
3195 False,
3196 _(b'run an SSH server bound to process handles'),
3196 _(b'run an SSH server bound to process handles'),
3197 ),
3197 ),
3198 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3198 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3199 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3199 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3200 ],
3200 ],
3201 b'',
3201 b'',
3202 )
3202 )
3203 def debugserve(ui, repo, **opts):
3203 def debugserve(ui, repo, **opts):
3204 """run a server with advanced settings
3204 """run a server with advanced settings
3205
3205
3206 This command is similar to :hg:`serve`. It exists partially as a
3206 This command is similar to :hg:`serve`. It exists partially as a
3207 workaround for the fact that ``hg serve --stdio`` must have specific
3207 workaround for the fact that ``hg serve --stdio`` must have specific
3208 arguments for security reasons.
3208 arguments for security reasons.
3209 """
3209 """
3210 opts = pycompat.byteskwargs(opts)
3210 opts = pycompat.byteskwargs(opts)
3211
3211
3212 if not opts[b'sshstdio']:
3212 if not opts[b'sshstdio']:
3213 raise error.Abort(_(b'only --sshstdio is currently supported'))
3213 raise error.Abort(_(b'only --sshstdio is currently supported'))
3214
3214
3215 logfh = None
3215 logfh = None
3216
3216
3217 if opts[b'logiofd'] and opts[b'logiofile']:
3217 if opts[b'logiofd'] and opts[b'logiofile']:
3218 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3218 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3219
3219
3220 if opts[b'logiofd']:
3220 if opts[b'logiofd']:
3221 # Line buffered because output is line based.
3221 # Ideally we would be line buffered. But line buffering in binary
3222 # mode isn't supported and emits a warning in Python 3.8+. Disabling
3223 # buffering could have performance impacts. But since this isn't
3224 # performance critical code, it should be fine.
3222 try:
3225 try:
3223 logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 1)
3226 logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
3224 except OSError as e:
3227 except OSError as e:
3225 if e.errno != errno.ESPIPE:
3228 if e.errno != errno.ESPIPE:
3226 raise
3229 raise
3227 # can't seek a pipe, so `ab` mode fails on py3
3230 # can't seek a pipe, so `ab` mode fails on py3
3228 logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 1)
3231 logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
3229 elif opts[b'logiofile']:
3232 elif opts[b'logiofile']:
3230 logfh = open(opts[b'logiofile'], b'ab', 1)
3233 logfh = open(opts[b'logiofile'], b'ab', 0)
3231
3234
3232 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3235 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3233 s.serve_forever()
3236 s.serve_forever()
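The buffering change in this function is the point of the changeset: requesting buffering=1 on a binary-mode handle emits a RuntimeWarning on Python 3.8+ and falls back to the default buffer size, so the log handles are now opened fully unbuffered. A minimal standalone sketch of the same pattern (the log file name is hypothetical):

import os

# buffering=0 gives an unbuffered binary handle: every write() reaches the
# descriptor immediately, which suits an I/O log without tripping the
# Python 3.8+ warning about line buffering in binary mode.
fd = os.open('server-io.log', os.O_WRONLY | os.O_CREAT | os.O_APPEND)
logfh = os.fdopen(fd, 'wb', 0)
logfh.write(b'o> 4\n')
logfh.close()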
3234
3237
3235
3238
3236 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3239 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3237 def debugsetparents(ui, repo, rev1, rev2=None):
3240 def debugsetparents(ui, repo, rev1, rev2=None):
3238 """manually set the parents of the current working directory
3241 """manually set the parents of the current working directory
3239
3242
3240 This is useful for writing repository conversion tools, but should
3243 This is useful for writing repository conversion tools, but should
3241 be used with care. For example, neither the working directory nor the
3244 be used with care. For example, neither the working directory nor the
3242 dirstate is updated, so file status may be incorrect after running this
3245 dirstate is updated, so file status may be incorrect after running this
3243 command.
3246 command.
3244
3247
3245 Returns 0 on success.
3248 Returns 0 on success.
3246 """
3249 """
3247
3250
3248 node1 = scmutil.revsingle(repo, rev1).node()
3251 node1 = scmutil.revsingle(repo, rev1).node()
3249 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3252 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3250
3253
3251 with repo.wlock():
3254 with repo.wlock():
3252 repo.setparents(node1, node2)
3255 repo.setparents(node1, node2)
3253
3256
3254
3257
3255 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3258 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3256 def debugsidedata(ui, repo, file_, rev=None, **opts):
3259 def debugsidedata(ui, repo, file_, rev=None, **opts):
3257 """dump the side data for a cl/manifest/file revision
3260 """dump the side data for a cl/manifest/file revision
3258
3261
3259 Use --verbose to dump the sidedata content."""
3262 Use --verbose to dump the sidedata content."""
3260 opts = pycompat.byteskwargs(opts)
3263 opts = pycompat.byteskwargs(opts)
3261 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3264 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3262 if rev is not None:
3265 if rev is not None:
3263 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3266 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3264 file_, rev = None, file_
3267 file_, rev = None, file_
3265 elif rev is None:
3268 elif rev is None:
3266 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3269 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3267 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
3270 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
3268 r = getattr(r, '_revlog', r)
3271 r = getattr(r, '_revlog', r)
3269 try:
3272 try:
3270 sidedata = r.sidedata(r.lookup(rev))
3273 sidedata = r.sidedata(r.lookup(rev))
3271 except KeyError:
3274 except KeyError:
3272 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3275 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3273 if sidedata:
3276 if sidedata:
3274 sidedata = list(sidedata.items())
3277 sidedata = list(sidedata.items())
3275 sidedata.sort()
3278 sidedata.sort()
3276 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3279 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3277 for key, value in sidedata:
3280 for key, value in sidedata:
3278 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3281 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3279 if ui.verbose:
3282 if ui.verbose:
3280 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3283 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3281
3284
3282
3285
3283 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3286 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3284 def debugssl(ui, repo, source=None, **opts):
3287 def debugssl(ui, repo, source=None, **opts):
3285 '''test a secure connection to a server
3288 '''test a secure connection to a server
3286
3289
3287 This builds the certificate chain for the server on Windows, installing the
3290 This builds the certificate chain for the server on Windows, installing the
3288 missing intermediates and trusted root via Windows Update if necessary. It
3291 missing intermediates and trusted root via Windows Update if necessary. It
3289 does nothing on other platforms.
3292 does nothing on other platforms.
3290
3293
3291 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3294 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3292 that server is used. See :hg:`help urls` for more information.
3295 that server is used. See :hg:`help urls` for more information.
3293
3296
3294 If the update succeeds, retry the original operation. Otherwise, the cause
3297 If the update succeeds, retry the original operation. Otherwise, the cause
3295 of the SSL error is likely another issue.
3298 of the SSL error is likely another issue.
3296 '''
3299 '''
3297 if not pycompat.iswindows:
3300 if not pycompat.iswindows:
3298 raise error.Abort(
3301 raise error.Abort(
3299 _(b'certificate chain building is only possible on Windows')
3302 _(b'certificate chain building is only possible on Windows')
3300 )
3303 )
3301
3304
3302 if not source:
3305 if not source:
3303 if not repo:
3306 if not repo:
3304 raise error.Abort(
3307 raise error.Abort(
3305 _(
3308 _(
3306 b"there is no Mercurial repository here, and no "
3309 b"there is no Mercurial repository here, and no "
3307 b"server specified"
3310 b"server specified"
3308 )
3311 )
3309 )
3312 )
3310 source = b"default"
3313 source = b"default"
3311
3314
3312 source, branches = hg.parseurl(ui.expandpath(source))
3315 source, branches = hg.parseurl(ui.expandpath(source))
3313 url = util.url(source)
3316 url = util.url(source)
3314
3317
3315 defaultport = {b'https': 443, b'ssh': 22}
3318 defaultport = {b'https': 443, b'ssh': 22}
3316 if url.scheme in defaultport:
3319 if url.scheme in defaultport:
3317 try:
3320 try:
3318 addr = (url.host, int(url.port or defaultport[url.scheme]))
3321 addr = (url.host, int(url.port or defaultport[url.scheme]))
3319 except ValueError:
3322 except ValueError:
3320 raise error.Abort(_(b"malformed port number in URL"))
3323 raise error.Abort(_(b"malformed port number in URL"))
3321 else:
3324 else:
3322 raise error.Abort(_(b"only https and ssh connections are supported"))
3325 raise error.Abort(_(b"only https and ssh connections are supported"))
3323
3326
3324 from . import win32
3327 from . import win32
3325
3328
3326 s = ssl.wrap_socket(
3329 s = ssl.wrap_socket(
3327 socket.socket(),
3330 socket.socket(),
3328 ssl_version=ssl.PROTOCOL_TLS,
3331 ssl_version=ssl.PROTOCOL_TLS,
3329 cert_reqs=ssl.CERT_NONE,
3332 cert_reqs=ssl.CERT_NONE,
3330 ca_certs=None,
3333 ca_certs=None,
3331 )
3334 )
3332
3335
3333 try:
3336 try:
3334 s.connect(addr)
3337 s.connect(addr)
3335 cert = s.getpeercert(True)
3338 cert = s.getpeercert(True)
3336
3339
3337 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3340 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3338
3341
3339 complete = win32.checkcertificatechain(cert, build=False)
3342 complete = win32.checkcertificatechain(cert, build=False)
3340
3343
3341 if not complete:
3344 if not complete:
3342 ui.status(_(b'certificate chain is incomplete, updating... '))
3345 ui.status(_(b'certificate chain is incomplete, updating... '))
3343
3346
3344 if not win32.checkcertificatechain(cert):
3347 if not win32.checkcertificatechain(cert):
3345 ui.status(_(b'failed.\n'))
3348 ui.status(_(b'failed.\n'))
3346 else:
3349 else:
3347 ui.status(_(b'done.\n'))
3350 ui.status(_(b'done.\n'))
3348 else:
3351 else:
3349 ui.status(_(b'full certificate chain is available\n'))
3352 ui.status(_(b'full certificate chain is available\n'))
3350 finally:
3353 finally:
3351 s.close()
3354 s.close()
3352
3355
3353
3356
3354 @command(
3357 @command(
3355 b'debugsub',
3358 b'debugsub',
3356 [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
3359 [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
3357 _(b'[-r REV] [REV]'),
3360 _(b'[-r REV] [REV]'),
3358 )
3361 )
3359 def debugsub(ui, repo, rev=None):
3362 def debugsub(ui, repo, rev=None):
3360 ctx = scmutil.revsingle(repo, rev, None)
3363 ctx = scmutil.revsingle(repo, rev, None)
3361 for k, v in sorted(ctx.substate.items()):
3364 for k, v in sorted(ctx.substate.items()):
3362 ui.writenoi18n(b'path %s\n' % k)
3365 ui.writenoi18n(b'path %s\n' % k)
3363 ui.writenoi18n(b' source %s\n' % v[0])
3366 ui.writenoi18n(b' source %s\n' % v[0])
3364 ui.writenoi18n(b' revision %s\n' % v[1])
3367 ui.writenoi18n(b' revision %s\n' % v[1])
3365
3368
3366
3369
3367 @command(
3370 @command(
3368 b'debugsuccessorssets',
3371 b'debugsuccessorssets',
3369 [(b'', b'closest', False, _(b'return closest successors sets only'))],
3372 [(b'', b'closest', False, _(b'return closest successors sets only'))],
3370 _(b'[REV]'),
3373 _(b'[REV]'),
3371 )
3374 )
3372 def debugsuccessorssets(ui, repo, *revs, **opts):
3375 def debugsuccessorssets(ui, repo, *revs, **opts):
3373 """show set of successors for revision
3376 """show set of successors for revision
3374
3377
3375 A successors set of changeset A is a consistent group of revisions that
3378 A successors set of changeset A is a consistent group of revisions that
3376 succeed A. It contains non-obsolete changesets only unless the closest
3379 succeed A. It contains non-obsolete changesets only unless the closest
3377 successors sets are requested (see --closest).
3380 successors sets are requested (see --closest).
3378
3381
3379 In most cases a changeset A has a single successors set containing a single
3382 In most cases a changeset A has a single successors set containing a single
3380 successor (changeset A replaced by A').
3383 successor (changeset A replaced by A').
3381
3384
3382 A changeset that is made obsolete with no successors is called "pruned".
3385 A changeset that is made obsolete with no successors is called "pruned".
3383 Such changesets have no successors sets at all.
3386 Such changesets have no successors sets at all.
3384
3387
3385 A changeset that has been "split" will have a successors set containing
3388 A changeset that has been "split" will have a successors set containing
3386 more than one successor.
3389 more than one successor.
3387
3390
3388 A changeset that has been rewritten in multiple different ways is called
3391 A changeset that has been rewritten in multiple different ways is called
3389 "divergent". Such changesets have multiple successor sets (each of which
3392 "divergent". Such changesets have multiple successor sets (each of which
3390 may also be split, i.e. have multiple successors).
3393 may also be split, i.e. have multiple successors).
3391
3394
3392 Results are displayed as follows::
3395 Results are displayed as follows::
3393
3396
3394 <rev1>
3397 <rev1>
3395 <successors-1A>
3398 <successors-1A>
3396 <rev2>
3399 <rev2>
3397 <successors-2A>
3400 <successors-2A>
3398 <successors-2B1> <successors-2B2> <successors-2B3>
3401 <successors-2B1> <successors-2B2> <successors-2B3>
3399
3402
3400 Here rev2 has two possible (i.e. divergent) successors sets. The first
3403 Here rev2 has two possible (i.e. divergent) successors sets. The first
3401 holds one element, whereas the second holds three (i.e. the changeset has
3404 holds one element, whereas the second holds three (i.e. the changeset has
3402 been split).
3405 been split).
3403 """
3406 """
3404 # passed to successorssets caching computation from one call to another
3407 # passed to successorssets caching computation from one call to another
3405 cache = {}
3408 cache = {}
3406 ctx2str = bytes
3409 ctx2str = bytes
3407 node2str = short
3410 node2str = short
3408 for rev in scmutil.revrange(repo, revs):
3411 for rev in scmutil.revrange(repo, revs):
3409 ctx = repo[rev]
3412 ctx = repo[rev]
3410 ui.write(b'%s\n' % ctx2str(ctx))
3413 ui.write(b'%s\n' % ctx2str(ctx))
3411 for succsset in obsutil.successorssets(
3414 for succsset in obsutil.successorssets(
3412 repo, ctx.node(), closest=opts['closest'], cache=cache
3415 repo, ctx.node(), closest=opts['closest'], cache=cache
3413 ):
3416 ):
3414 if succsset:
3417 if succsset:
3415 ui.write(b' ')
3418 ui.write(b' ')
3416 ui.write(node2str(succsset[0]))
3419 ui.write(node2str(succsset[0]))
3417 for node in succsset[1:]:
3420 for node in succsset[1:]:
3418 ui.write(b' ')
3421 ui.write(b' ')
3419 ui.write(node2str(node))
3422 ui.write(node2str(node))
3420 ui.write(b'\n')
3423 ui.write(b'\n')
3421
3424
3422
3425
3423 @command(
3426 @command(
3424 b'debugtemplate',
3427 b'debugtemplate',
3425 [
3428 [
3426 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
3429 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
3427 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
3430 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
3428 ],
3431 ],
3429 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
3432 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
3430 optionalrepo=True,
3433 optionalrepo=True,
3431 )
3434 )
3432 def debugtemplate(ui, repo, tmpl, **opts):
3435 def debugtemplate(ui, repo, tmpl, **opts):
3433 """parse and apply a template
3436 """parse and apply a template
3434
3437
3435 If -r/--rev is given, the template is processed as a log template and
3438 If -r/--rev is given, the template is processed as a log template and
3436 applied to the given changesets. Otherwise, it is processed as a generic
3439 applied to the given changesets. Otherwise, it is processed as a generic
3437 template.
3440 template.
3438
3441
3439 Use --verbose to print the parsed tree.
3442 Use --verbose to print the parsed tree.
3440 """
3443 """
3441 revs = None
3444 revs = None
3442 if opts['rev']:
3445 if opts['rev']:
3443 if repo is None:
3446 if repo is None:
3444 raise error.RepoError(
3447 raise error.RepoError(
3445 _(b'there is no Mercurial repository here (.hg not found)')
3448 _(b'there is no Mercurial repository here (.hg not found)')
3446 )
3449 )
3447 revs = scmutil.revrange(repo, opts['rev'])
3450 revs = scmutil.revrange(repo, opts['rev'])
3448
3451
3449 props = {}
3452 props = {}
3450 for d in opts['define']:
3453 for d in opts['define']:
3451 try:
3454 try:
3452 k, v = (e.strip() for e in d.split(b'=', 1))
3455 k, v = (e.strip() for e in d.split(b'=', 1))
3453 if not k or k == b'ui':
3456 if not k or k == b'ui':
3454 raise ValueError
3457 raise ValueError
3455 props[k] = v
3458 props[k] = v
3456 except ValueError:
3459 except ValueError:
3457 raise error.Abort(_(b'malformed keyword definition: %s') % d)
3460 raise error.Abort(_(b'malformed keyword definition: %s') % d)
3458
3461
3459 if ui.verbose:
3462 if ui.verbose:
3460 aliases = ui.configitems(b'templatealias')
3463 aliases = ui.configitems(b'templatealias')
3461 tree = templater.parse(tmpl)
3464 tree = templater.parse(tmpl)
3462 ui.note(templater.prettyformat(tree), b'\n')
3465 ui.note(templater.prettyformat(tree), b'\n')
3463 newtree = templater.expandaliases(tree, aliases)
3466 newtree = templater.expandaliases(tree, aliases)
3464 if newtree != tree:
3467 if newtree != tree:
3465 ui.notenoi18n(
3468 ui.notenoi18n(
3466 b"* expanded:\n", templater.prettyformat(newtree), b'\n'
3469 b"* expanded:\n", templater.prettyformat(newtree), b'\n'
3467 )
3470 )
3468
3471
3469 if revs is None:
3472 if revs is None:
3470 tres = formatter.templateresources(ui, repo)
3473 tres = formatter.templateresources(ui, repo)
3471 t = formatter.maketemplater(ui, tmpl, resources=tres)
3474 t = formatter.maketemplater(ui, tmpl, resources=tres)
3472 if ui.verbose:
3475 if ui.verbose:
3473 kwds, funcs = t.symbolsuseddefault()
3476 kwds, funcs = t.symbolsuseddefault()
3474 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3477 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3475 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3478 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3476 ui.write(t.renderdefault(props))
3479 ui.write(t.renderdefault(props))
3477 else:
3480 else:
3478 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
3481 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
3479 if ui.verbose:
3482 if ui.verbose:
3480 kwds, funcs = displayer.t.symbolsuseddefault()
3483 kwds, funcs = displayer.t.symbolsuseddefault()
3481 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3484 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3482 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3485 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3483 for r in revs:
3486 for r in revs:
3484 displayer.show(repo[r], **pycompat.strkwargs(props))
3487 displayer.show(repo[r], **pycompat.strkwargs(props))
3485 displayer.close()
3488 displayer.close()
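The generic branch above (no -r/--rev) reduces to three calls. A minimal sketch (hypothetical helper, not part of this module) of that path, rendering a template once against -D style properties:

def _rendergeneric(ui, repo, tmpl, props):
    # Build template resources for the repo, compile the template, and
    # render it with the given properties exposed as template keywords.
    tres = formatter.templateresources(ui, repo)
    t = formatter.maketemplater(ui, tmpl, resources=tres)
    return t.renderdefault(props)

# e.g. _rendergeneric(ui, repo, b'{greeting} {name}\n',
#                     {b'greeting': b'hello', b'name': b'world'})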
3486
3489
3487
3490
3488 @command(
3491 @command(
3489 b'debuguigetpass',
3492 b'debuguigetpass',
3490 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3493 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3491 _(b'[-p TEXT]'),
3494 _(b'[-p TEXT]'),
3492 norepo=True,
3495 norepo=True,
3493 )
3496 )
3494 def debuguigetpass(ui, prompt=b''):
3497 def debuguigetpass(ui, prompt=b''):
3495 """show prompt to type password"""
3498 """show prompt to type password"""
3496 r = ui.getpass(prompt)
3499 r = ui.getpass(prompt)
3497 ui.writenoi18n(b'response: %s\n' % r)
3500 ui.writenoi18n(b'response: %s\n' % r)
3498
3501
3499
3502
3500 @command(
3503 @command(
3501 b'debuguiprompt',
3504 b'debuguiprompt',
3502 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3505 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3503 _(b'[-p TEXT]'),
3506 _(b'[-p TEXT]'),
3504 norepo=True,
3507 norepo=True,
3505 )
3508 )
3506 def debuguiprompt(ui, prompt=b''):
3509 def debuguiprompt(ui, prompt=b''):
3507 """show plain prompt"""
3510 """show plain prompt"""
3508 r = ui.prompt(prompt)
3511 r = ui.prompt(prompt)
3509 ui.writenoi18n(b'response: %s\n' % r)
3512 ui.writenoi18n(b'response: %s\n' % r)
3510
3513
3511
3514
3512 @command(b'debugupdatecaches', [])
3515 @command(b'debugupdatecaches', [])
3513 def debugupdatecaches(ui, repo, *pats, **opts):
3516 def debugupdatecaches(ui, repo, *pats, **opts):
3514 """warm all known caches in the repository"""
3517 """warm all known caches in the repository"""
3515 with repo.wlock(), repo.lock():
3518 with repo.wlock(), repo.lock():
3516 repo.updatecaches(full=True)
3519 repo.updatecaches(full=True)
3517
3520
3518
3521
3519 @command(
3522 @command(
3520 b'debugupgraderepo',
3523 b'debugupgraderepo',
3521 [
3524 [
3522 (
3525 (
3523 b'o',
3526 b'o',
3524 b'optimize',
3527 b'optimize',
3525 [],
3528 [],
3526 _(b'extra optimization to perform'),
3529 _(b'extra optimization to perform'),
3527 _(b'NAME'),
3530 _(b'NAME'),
3528 ),
3531 ),
3529 (b'', b'run', False, _(b'performs an upgrade')),
3532 (b'', b'run', False, _(b'performs an upgrade')),
3530 (b'', b'backup', True, _(b'keep the old repository content around')),
3533 (b'', b'backup', True, _(b'keep the old repository content around')),
3531 (b'', b'changelog', None, _(b'select the changelog for upgrade')),
3534 (b'', b'changelog', None, _(b'select the changelog for upgrade')),
3532 (b'', b'manifest', None, _(b'select the manifest for upgrade')),
3535 (b'', b'manifest', None, _(b'select the manifest for upgrade')),
3533 ],
3536 ],
3534 )
3537 )
3535 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
3538 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
3536 """upgrade a repository to use different features
3539 """upgrade a repository to use different features
3537
3540
3538 If no arguments are specified, the repository is evaluated for upgrade
3541 If no arguments are specified, the repository is evaluated for upgrade
3539 and a list of problems and potential optimizations is printed.
3542 and a list of problems and potential optimizations is printed.
3540
3543
3541 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
3544 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
3542 can be influenced via additional arguments. More details will be provided
3545 can be influenced via additional arguments. More details will be provided
3543 by the command output when run without ``--run``.
3546 by the command output when run without ``--run``.
3544
3547
3545 During the upgrade, the repository will be locked and no writes will be
3548 During the upgrade, the repository will be locked and no writes will be
3546 allowed.
3549 allowed.
3547
3550
3548 At the end of the upgrade, the repository may not be readable while new
3551 At the end of the upgrade, the repository may not be readable while new
3549 repository data is swapped in. This window will be as long as it takes to
3552 repository data is swapped in. This window will be as long as it takes to
3550 rename some directories inside the ``.hg`` directory. On most machines, this
3553 rename some directories inside the ``.hg`` directory. On most machines, this
3551 should complete almost instantaneously and the chances of a consumer being
3554 should complete almost instantaneously and the chances of a consumer being
3552 unable to access the repository should be low.
3555 unable to access the repository should be low.
3553
3556
3554 By default, all revlogs will be upgraded. You can restrict this using flags
3557 By default, all revlogs will be upgraded. You can restrict this using flags
3555 such as `--manifest`:
3558 such as `--manifest`:
3556
3559
3557 * `--manifest`: only optimize the manifest
3560 * `--manifest`: only optimize the manifest
3558 * `--no-manifest`: optimize all revlog but the manifest
3561 * `--no-manifest`: optimize all revlog but the manifest
3559 * `--changelog`: optimize the changelog only
3562 * `--changelog`: optimize the changelog only
3560 * `--no-changelog --no-manifest`: optimize filelogs only
3563 * `--no-changelog --no-manifest`: optimize filelogs only
3561 """
3564 """
3562 return upgrade.upgraderepo(
3565 return upgrade.upgraderepo(
3563 ui, repo, run=run, optimize=optimize, backup=backup, **opts
3566 ui, repo, run=run, optimize=optimize, backup=backup, **opts
3564 )
3567 )
3565
3568
3566
3569
3567 @command(
3570 @command(
3568 b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
3571 b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
3569 )
3572 )
3570 def debugwalk(ui, repo, *pats, **opts):
3573 def debugwalk(ui, repo, *pats, **opts):
3571 """show how files match on given patterns"""
3574 """show how files match on given patterns"""
3572 opts = pycompat.byteskwargs(opts)
3575 opts = pycompat.byteskwargs(opts)
3573 m = scmutil.match(repo[None], pats, opts)
3576 m = scmutil.match(repo[None], pats, opts)
3574 if ui.verbose:
3577 if ui.verbose:
3575 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
3578 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
3576 items = list(repo[None].walk(m))
3579 items = list(repo[None].walk(m))
3577 if not items:
3580 if not items:
3578 return
3581 return
3579 f = lambda fn: fn
3582 f = lambda fn: fn
3580 if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
3583 if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
3581 f = lambda fn: util.normpath(fn)
3584 f = lambda fn: util.normpath(fn)
3582 fmt = b'f %%-%ds %%-%ds %%s' % (
3585 fmt = b'f %%-%ds %%-%ds %%s' % (
3583 max([len(abs) for abs in items]),
3586 max([len(abs) for abs in items]),
3584 max([len(repo.pathto(abs)) for abs in items]),
3587 max([len(repo.pathto(abs)) for abs in items]),
3585 )
3588 )
3586 for abs in items:
3589 for abs in items:
3587 line = fmt % (
3590 line = fmt % (
3588 abs,
3591 abs,
3589 f(repo.pathto(abs)),
3592 f(repo.pathto(abs)),
3590 m.exact(abs) and b'exact' or b'',
3593 m.exact(abs) and b'exact' or b'',
3591 )
3594 )
3592 ui.write(b"%s\n" % line.rstrip())
3595 ui.write(b"%s\n" % line.rstrip())
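The command above is essentially a matcher plus a walk. A minimal sketch of the same two calls, with a hypothetical pattern:

# Build a matcher for the working context and print every file it selects.
m = scmutil.match(repo[None], [b'glob:**.py'], {})
for abs in repo[None].walk(m):
    ui.write(b'%s\n' % abs)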
3593
3596
3594
3597
3595 @command(b'debugwhyunstable', [], _(b'REV'))
3598 @command(b'debugwhyunstable', [], _(b'REV'))
3596 def debugwhyunstable(ui, repo, rev):
3599 def debugwhyunstable(ui, repo, rev):
3597 """explain instabilities of a changeset"""
3600 """explain instabilities of a changeset"""
3598 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
3601 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
3599 dnodes = b''
3602 dnodes = b''
3600 if entry.get(b'divergentnodes'):
3603 if entry.get(b'divergentnodes'):
3601 dnodes = (
3604 dnodes = (
3602 b' '.join(
3605 b' '.join(
3603 b'%s (%s)' % (ctx.hex(), ctx.phasestr())
3606 b'%s (%s)' % (ctx.hex(), ctx.phasestr())
3604 for ctx in entry[b'divergentnodes']
3607 for ctx in entry[b'divergentnodes']
3605 )
3608 )
3606 + b' '
3609 + b' '
3607 )
3610 )
3608 ui.write(
3611 ui.write(
3609 b'%s: %s%s %s\n'
3612 b'%s: %s%s %s\n'
3610 % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
3613 % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
3611 )
3614 )
3612
3615
3613
3616
3614 @command(
3617 @command(
3615 b'debugwireargs',
3618 b'debugwireargs',
3616 [
3619 [
3617 (b'', b'three', b'', b'three'),
3620 (b'', b'three', b'', b'three'),
3618 (b'', b'four', b'', b'four'),
3621 (b'', b'four', b'', b'four'),
3619 (b'', b'five', b'', b'five'),
3622 (b'', b'five', b'', b'five'),
3620 ]
3623 ]
3621 + cmdutil.remoteopts,
3624 + cmdutil.remoteopts,
3622 _(b'REPO [OPTIONS]... [ONE [TWO]]'),
3625 _(b'REPO [OPTIONS]... [ONE [TWO]]'),
3623 norepo=True,
3626 norepo=True,
3624 )
3627 )
3625 def debugwireargs(ui, repopath, *vals, **opts):
3628 def debugwireargs(ui, repopath, *vals, **opts):
3626 opts = pycompat.byteskwargs(opts)
3629 opts = pycompat.byteskwargs(opts)
3627 repo = hg.peer(ui, opts, repopath)
3630 repo = hg.peer(ui, opts, repopath)
3628 for opt in cmdutil.remoteopts:
3631 for opt in cmdutil.remoteopts:
3629 del opts[opt[1]]
3632 del opts[opt[1]]
3630 args = {}
3633 args = {}
3631 for k, v in pycompat.iteritems(opts):
3634 for k, v in pycompat.iteritems(opts):
3632 if v:
3635 if v:
3633 args[k] = v
3636 args[k] = v
3634 args = pycompat.strkwargs(args)
3637 args = pycompat.strkwargs(args)
3635 # run twice to check that we don't mess up the stream for the next command
3638 # run twice to check that we don't mess up the stream for the next command
3636 res1 = repo.debugwireargs(*vals, **args)
3639 res1 = repo.debugwireargs(*vals, **args)
3637 res2 = repo.debugwireargs(*vals, **args)
3640 res2 = repo.debugwireargs(*vals, **args)
3638 ui.write(b"%s\n" % res1)
3641 ui.write(b"%s\n" % res1)
3639 if res1 != res2:
3642 if res1 != res2:
3640 ui.warn(b"%s\n" % res2)
3643 ui.warn(b"%s\n" % res2)
3641
3644
3642
3645
3643 def _parsewirelangblocks(fh):
3646 def _parsewirelangblocks(fh):
3644 activeaction = None
3647 activeaction = None
3645 blocklines = []
3648 blocklines = []
3646 lastindent = 0
3649 lastindent = 0
3647
3650
3648 for line in fh:
3651 for line in fh:
3649 line = line.rstrip()
3652 line = line.rstrip()
3650 if not line:
3653 if not line:
3651 continue
3654 continue
3652
3655
3653 if line.startswith(b'#'):
3656 if line.startswith(b'#'):
3654 continue
3657 continue
3655
3658
3656 if not line.startswith(b' '):
3659 if not line.startswith(b' '):
3657 # New block. Flush previous one.
3660 # New block. Flush previous one.
3658 if activeaction:
3661 if activeaction:
3659 yield activeaction, blocklines
3662 yield activeaction, blocklines
3660
3663
3661 activeaction = line
3664 activeaction = line
3662 blocklines = []
3665 blocklines = []
3663 lastindent = 0
3666 lastindent = 0
3664 continue
3667 continue
3665
3668
3666 # Else we start with an indent.
3669 # Else we start with an indent.
3667
3670
3668 if not activeaction:
3671 if not activeaction:
3669 raise error.Abort(_(b'indented line outside of block'))
3672 raise error.Abort(_(b'indented line outside of block'))
3670
3673
3671 indent = len(line) - len(line.lstrip())
3674 indent = len(line) - len(line.lstrip())
3672
3675
3673 # If this line is indented more than the last line, concatenate it.
3676 # If this line is indented more than the last line, concatenate it.
3674 if indent > lastindent and blocklines:
3677 if indent > lastindent and blocklines:
3675 blocklines[-1] += line.lstrip()
3678 blocklines[-1] += line.lstrip()
3676 else:
3679 else:
3677 blocklines.append(line)
3680 blocklines.append(line)
3678 lastindent = indent
3681 lastindent = indent
3679
3682
3680 # Flush last block.
3683 # Flush last block.
3681 if activeaction:
3684 if activeaction:
3682 yield activeaction, blocklines
3685 yield activeaction, blocklines
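A minimal sketch (assumes it runs where _parsewirelangblocks is in scope) showing how the parser above groups an action line with its indented argument lines:

import io

sample = (
    b'# comments are skipped\n'
    b'command listkeys\n'
    b'    namespace bookmarks\n'
    b'raw\n'
    b'    hello\n'
)
for action, lines in _parsewirelangblocks(io.BytesIO(sample)):
    # Yields (b'command listkeys', [b'    namespace bookmarks']) and then
    # (b'raw', [b'    hello']); indented lines keep their leading spaces.
    print(action, lines)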
3683
3686
3684
3687
3685 @command(
3688 @command(
3686 b'debugwireproto',
3689 b'debugwireproto',
3687 [
3690 [
3688 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
3691 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
3689 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
3692 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
3690 (
3693 (
3691 b'',
3694 b'',
3692 b'noreadstderr',
3695 b'noreadstderr',
3693 False,
3696 False,
3694 _(b'do not read from stderr of the remote'),
3697 _(b'do not read from stderr of the remote'),
3695 ),
3698 ),
3696 (
3699 (
3697 b'',
3700 b'',
3698 b'nologhandshake',
3701 b'nologhandshake',
3699 False,
3702 False,
3700 _(b'do not log I/O related to the peer handshake'),
3703 _(b'do not log I/O related to the peer handshake'),
3701 ),
3704 ),
3702 ]
3705 ]
3703 + cmdutil.remoteopts,
3706 + cmdutil.remoteopts,
3704 _(b'[PATH]'),
3707 _(b'[PATH]'),
3705 optionalrepo=True,
3708 optionalrepo=True,
3706 )
3709 )
3707 def debugwireproto(ui, repo, path=None, **opts):
3710 def debugwireproto(ui, repo, path=None, **opts):
3708 """send wire protocol commands to a server
3711 """send wire protocol commands to a server
3709
3712
3710 This command can be used to issue wire protocol commands to remote
3713 This command can be used to issue wire protocol commands to remote
3711 peers and to debug the raw data being exchanged.
3714 peers and to debug the raw data being exchanged.
3712
3715
3713 ``--localssh`` will start an SSH server against the current repository
3716 ``--localssh`` will start an SSH server against the current repository
3714 and connect to that. By default, the connection will perform a handshake
3717 and connect to that. By default, the connection will perform a handshake
3715 and establish an appropriate peer instance.
3718 and establish an appropriate peer instance.
3716
3719
3717 ``--peer`` can be used to bypass the handshake protocol and construct a
3720 ``--peer`` can be used to bypass the handshake protocol and construct a
3718 peer instance using the specified class type. Valid values are ``raw``,
3721 peer instance using the specified class type. Valid values are ``raw``,
3719 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
3722 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
3720 raw data payloads and don't support higher-level command actions.
3723 raw data payloads and don't support higher-level command actions.
3721
3724
3722 ``--noreadstderr`` can be used to disable automatic reading from stderr
3725 ``--noreadstderr`` can be used to disable automatic reading from stderr
3723 of the peer (for SSH connections only). Disabling automatic reading of
3726 of the peer (for SSH connections only). Disabling automatic reading of
3724 stderr is useful for making output more deterministic.
3727 stderr is useful for making output more deterministic.
3725
3728
3726 Commands are issued via a mini language which is specified via stdin.
3729 Commands are issued via a mini language which is specified via stdin.
3727 The language consists of individual actions to perform. An action is
3730 The language consists of individual actions to perform. An action is
3728 defined by a block. A block is defined as a line with no leading
3731 defined by a block. A block is defined as a line with no leading
3729 space followed by 0 or more lines with leading space. Blocks are
3732 space followed by 0 or more lines with leading space. Blocks are
3730 effectively a high-level command with additional metadata.
3733 effectively a high-level command with additional metadata.
3731
3734
3732 Lines beginning with ``#`` are ignored.
3735 Lines beginning with ``#`` are ignored.
3733
3736
3734 The following sections denote available actions.
3737 The following sections denote available actions.
3735
3738
3736 raw
3739 raw
3737 ---
3740 ---
3738
3741
3739 Send raw data to the server.
3742 Send raw data to the server.
3740
3743
3741 The block payload contains the raw data to send as one atomic send
3744 The block payload contains the raw data to send as one atomic send
3742 operation. The data may not actually be delivered in a single system
3745 operation. The data may not actually be delivered in a single system
3743 call: it depends on the abilities of the transport being used.
3746 call: it depends on the abilities of the transport being used.
3744
3747
3745 Each line in the block is de-indented and concatenated. Then, that
3748 Each line in the block is de-indented and concatenated. Then, that
3746 value is evaluated as a Python b'' literal. This allows the use of
3749 value is evaluated as a Python b'' literal. This allows the use of
3747 backslash escaping, etc.
3750 backslash escaping, etc.
3748
3751
3749 raw+
3752 raw+
3750 ----
3753 ----
3751
3754
3752 Behaves like ``raw`` except flushes output afterwards.
3755 Behaves like ``raw`` except flushes output afterwards.
3753
3756
3754 command <X>
3757 command <X>
3755 -----------
3758 -----------
3756
3759
3757 Send a request to run a named command, whose name follows the ``command``
3760 Send a request to run a named command, whose name follows the ``command``
3758 string.
3761 string.
3759
3762
3760 Arguments to the command are defined as lines in this block. The format of
3763 Arguments to the command are defined as lines in this block. The format of
3761 each line is ``<key> <value>``. e.g.::
3764 each line is ``<key> <value>``. e.g.::
3762
3765
3763 command listkeys
3766 command listkeys
3764 namespace bookmarks
3767 namespace bookmarks
3765
3768
3766 If the value begins with ``eval:``, it will be interpreted as a Python
3769 If the value begins with ``eval:``, it will be interpreted as a Python
3767 literal expression. Otherwise values are interpreted as Python b'' literals.
3770 literal expression. Otherwise values are interpreted as Python b'' literals.
3768 This allows sending complex types and encoding special byte sequences via
3771 This allows sending complex types and encoding special byte sequences via
3769 backslash escaping.
3772 backslash escaping.
3770
3773
3771 The following arguments have special meaning:
3774 The following arguments have special meaning:
3772
3775
3773 ``PUSHFILE``
3776 ``PUSHFILE``
3774 When defined, the *push* mechanism of the peer will be used instead
3777 When defined, the *push* mechanism of the peer will be used instead
3775 of the static request-response mechanism and the content of the
3778 of the static request-response mechanism and the content of the
3776 file specified in the value of this argument will be sent as the
3779 file specified in the value of this argument will be sent as the
3777 command payload.
3780 command payload.
3778
3781
3779 This can be used to submit a local bundle file to the remote.
3782 This can be used to submit a local bundle file to the remote.
3780
3783
3781 batchbegin
3784 batchbegin
3782 ----------
3785 ----------
3783
3786
3784 Instruct the peer to begin a batched send.
3787 Instruct the peer to begin a batched send.
3785
3788
3786 All ``command`` blocks are queued for execution until the next
3789 All ``command`` blocks are queued for execution until the next
3787 ``batchsubmit`` block.
3790 ``batchsubmit`` block.
3788
3791
3789 batchsubmit
3792 batchsubmit
3790 -----------
3793 -----------
3791
3794
3792 Submit previously queued ``command`` blocks as a batch request.
3795 Submit previously queued ``command`` blocks as a batch request.
3793
3796
3794 This action MUST be paired with a ``batchbegin`` action.
3797 This action MUST be paired with a ``batchbegin`` action.
3795
3798
3796 httprequest <method> <path>
3799 httprequest <method> <path>
3797 ---------------------------
3800 ---------------------------
3798
3801
3799 (HTTP peer only)
3802 (HTTP peer only)
3800
3803
3801 Send an HTTP request to the peer.
3804 Send an HTTP request to the peer.
3802
3805
3803 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
3806 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
3804
3807
3805 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
3808 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
3806 headers to add to the request. e.g. ``Accept: foo``.
3809 headers to add to the request. e.g. ``Accept: foo``.
3807
3810
3808 The following arguments are special:
3811 The following arguments are special:
3809
3812
3810 ``BODYFILE``
3813 ``BODYFILE``
3811 The content of the file defined as the value to this argument will be
3814 The content of the file defined as the value to this argument will be
3812 transferred verbatim as the HTTP request body.
3815 transferred verbatim as the HTTP request body.
3813
3816
3814 ``frame <type> <flags> <payload>``
3817 ``frame <type> <flags> <payload>``
3815 Send a unified protocol frame as part of the request body.
3818 Send a unified protocol frame as part of the request body.
3816
3819
3817 All frames will be collected and sent as the body to the HTTP
3820 All frames will be collected and sent as the body to the HTTP
3818 request.
3821 request.
3819
3822
3820 close
3823 close
3821 -----
3824 -----
3822
3825
3823 Close the connection to the server.
3826 Close the connection to the server.
3824
3827
3825 flush
3828 flush
3826 -----
3829 -----
3827
3830
3828 Flush data written to the server.
3831 Flush data written to the server.
3829
3832
3830 readavailable
3833 readavailable
3831 -------------
3834 -------------
3832
3835
3833 Close the write end of the connection and read all available data from
3836 Close the write end of the connection and read all available data from
3834 the server.
3837 the server.
3835
3838
3836 If the connection to the server encompasses multiple pipes, we poll both
3839 If the connection to the server encompasses multiple pipes, we poll both
3837 pipes and read available data.
3840 pipes and read available data.
3838
3841
3839 readline
3842 readline
3840 --------
3843 --------
3841
3844
3842 Read a line of output from the server. If there are multiple output
3845 Read a line of output from the server. If there are multiple output
3843 pipes, reads only the main pipe.
3846 pipes, reads only the main pipe.
3844
3847
3845 ereadline
3848 ereadline
3846 ---------
3849 ---------
3847
3850
3848 Like ``readline``, but read from the stderr pipe, if available.
3851 Like ``readline``, but read from the stderr pipe, if available.
3849
3852
3850 read <X>
3853 read <X>
3851 --------
3854 --------
3852
3855
3853 ``read()`` N bytes from the server's main output pipe.
3856 ``read()`` N bytes from the server's main output pipe.
3854
3857
3855 eread <X>
3858 eread <X>
3856 ---------
3859 ---------
3857
3860
3858 ``read()`` N bytes from the server's stderr pipe, if available.
3861 ``read()`` N bytes from the server's stderr pipe, if available.
3859
3862
3860 Specifying Unified Frame-Based Protocol Frames
3863 Specifying Unified Frame-Based Protocol Frames
3861 ----------------------------------------------
3864 ----------------------------------------------
3862
3865
3863 It is possible to emit a *Unified Frame-Based Protocol* by using special
3866 It is possible to emit a *Unified Frame-Based Protocol* by using special
3864 syntax.
3867 syntax.
3865
3868
3866 A frame is composed as a type, flags, and payload. These can be parsed
3869 A frame is composed as a type, flags, and payload. These can be parsed
3867 from a string of the form:
3870 from a string of the form:
3868
3871
3869 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
3872 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
3870
3873
3871 ``request-id`` and ``stream-id`` are integers defining the request and
3874 ``request-id`` and ``stream-id`` are integers defining the request and
3872 stream identifiers.
3875 stream identifiers.
3873
3876
3874 ``type`` can be an integer value for the frame type or the string name
3877 ``type`` can be an integer value for the frame type or the string name
3875 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
3878 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
3876 ``command-name``.
3879 ``command-name``.
3877
3880
3878 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
3881 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
3879 components. Each component (and there can be just one) can be an integer
3882 components. Each component (and there can be just one) can be an integer
3880 or a flag name for stream flags or frame flags, respectively. Values are
3883 or a flag name for stream flags or frame flags, respectively. Values are
3881 resolved to integers and then bitwise OR'd together.
3884 resolved to integers and then bitwise OR'd together.
3882
3885
3883 ``payload`` represents the raw frame payload. If it begins with
3886 ``payload`` represents the raw frame payload. If it begins with
3884 ``cbor:``, the following string is evaluated as Python code and the
3887 ``cbor:``, the following string is evaluated as Python code and the
3885 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3888 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3886 as a Python byte string literal.
3889 as a Python byte string literal.
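For example, the following illustrative frame specification (a sketch; the
type and flag names are assumed to match those defined in
``wireprotoframing.py``) describes a new command request for the ``heads``
command on request 1, stream 1::

  1 1 stream-begin command-request new cbor:{b'name': b'heads'}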
3887 """
3890 """
3888 opts = pycompat.byteskwargs(opts)
3891 opts = pycompat.byteskwargs(opts)
3889
3892
3890 if opts[b'localssh'] and not repo:
3893 if opts[b'localssh'] and not repo:
3891 raise error.Abort(_(b'--localssh requires a repository'))
3894 raise error.Abort(_(b'--localssh requires a repository'))
3892
3895
3893 if opts[b'peer'] and opts[b'peer'] not in (
3896 if opts[b'peer'] and opts[b'peer'] not in (
3894 b'raw',
3897 b'raw',
3895 b'http2',
3898 b'http2',
3896 b'ssh1',
3899 b'ssh1',
3897 b'ssh2',
3900 b'ssh2',
3898 ):
3901 ):
3899 raise error.Abort(
3902 raise error.Abort(
3900 _(b'invalid value for --peer'),
3903 _(b'invalid value for --peer'),
3901 hint=_(b'valid values are "raw", "http2", "ssh1", and "ssh2"'),
3904 hint=_(b'valid values are "raw", "http2", "ssh1", and "ssh2"'),
3902 )
3905 )
3903
3906
3904 if path and opts[b'localssh']:
3907 if path and opts[b'localssh']:
3905 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
3908 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
3906
3909
3907 if ui.interactive():
3910 if ui.interactive():
3908 ui.write(_(b'(waiting for commands on stdin)\n'))
3911 ui.write(_(b'(waiting for commands on stdin)\n'))
3909
3912
3910 blocks = list(_parsewirelangblocks(ui.fin))
3913 blocks = list(_parsewirelangblocks(ui.fin))
3911
3914
3912 proc = None
3915 proc = None
3913 stdin = None
3916 stdin = None
3914 stdout = None
3917 stdout = None
3915 stderr = None
3918 stderr = None
3916 opener = None
3919 opener = None
3917
3920
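# Illustrative invocation (a sketch, not from the original source; the
# input file name is hypothetical):
#
#   $ hg --verbose debugwireproto --localssh --peer raw < commands.txt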
3918 if opts[b'localssh']:
3921 if opts[b'localssh']:
3919 # We start the SSH server in its own process so there is process
3922 # We start the SSH server in its own process so there is process
3920 # separation. This prevents a whole class of potential bugs around
3923 # separation. This prevents a whole class of potential bugs around
3921 # shared state from interfering with server operation.
3924 # shared state from interfering with server operation.
3922 args = procutil.hgcmd() + [
3925 args = procutil.hgcmd() + [
3923 b'-R',
3926 b'-R',
3924 repo.root,
3927 repo.root,
3925 b'debugserve',
3928 b'debugserve',
3926 b'--sshstdio',
3929 b'--sshstdio',
3927 ]
3930 ]
3928 proc = subprocess.Popen(
3931 proc = subprocess.Popen(
3929 pycompat.rapply(procutil.tonativestr, args),
3932 pycompat.rapply(procutil.tonativestr, args),
3930 stdin=subprocess.PIPE,
3933 stdin=subprocess.PIPE,
3931 stdout=subprocess.PIPE,
3934 stdout=subprocess.PIPE,
3932 stderr=subprocess.PIPE,
3935 stderr=subprocess.PIPE,
3933 bufsize=0,
3936 bufsize=0,
3934 )
3937 )
3935
3938
3936 stdin = proc.stdin
3939 stdin = proc.stdin
3937 stdout = proc.stdout
3940 stdout = proc.stdout
3938 stderr = proc.stderr
3941 stderr = proc.stderr
3939
3942
3940 # We turn the pipes into observers so we can log I/O.
3943 # We turn the pipes into observers so we can log I/O.
3941 if ui.verbose or opts[b'peer'] == b'raw':
3944 if ui.verbose or opts[b'peer'] == b'raw':
3942 stdin = util.makeloggingfileobject(
3945 stdin = util.makeloggingfileobject(
3943 ui, proc.stdin, b'i', logdata=True
3946 ui, proc.stdin, b'i', logdata=True
3944 )
3947 )
3945 stdout = util.makeloggingfileobject(
3948 stdout = util.makeloggingfileobject(
3946 ui, proc.stdout, b'o', logdata=True
3949 ui, proc.stdout, b'o', logdata=True
3947 )
3950 )
3948 stderr = util.makeloggingfileobject(
3951 stderr = util.makeloggingfileobject(
3949 ui, proc.stderr, b'e', logdata=True
3952 ui, proc.stderr, b'e', logdata=True
3950 )
3953 )
3951
3954
3952 # --localssh also implies the peer connection settings.
3955 # --localssh also implies the peer connection settings.
3953
3956
3954 url = b'ssh://localserver'
3957 url = b'ssh://localserver'
3955 autoreadstderr = not opts[b'noreadstderr']
3958 autoreadstderr = not opts[b'noreadstderr']
3956
3959
3957 if opts[b'peer'] == b'ssh1':
3960 if opts[b'peer'] == b'ssh1':
3958 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
3961 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
3959 peer = sshpeer.sshv1peer(
3962 peer = sshpeer.sshv1peer(
3960 ui,
3963 ui,
3961 url,
3964 url,
3962 proc,
3965 proc,
3963 stdin,
3966 stdin,
3964 stdout,
3967 stdout,
3965 stderr,
3968 stderr,
3966 None,
3969 None,
3967 autoreadstderr=autoreadstderr,
3970 autoreadstderr=autoreadstderr,
3968 )
3971 )
3969 elif opts[b'peer'] == b'ssh2':
3972 elif opts[b'peer'] == b'ssh2':
3970 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
3973 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
3971 peer = sshpeer.sshv2peer(
3974 peer = sshpeer.sshv2peer(
3972 ui,
3975 ui,
3973 url,
3976 url,
3974 proc,
3977 proc,
3975 stdin,
3978 stdin,
3976 stdout,
3979 stdout,
3977 stderr,
3980 stderr,
3978 None,
3981 None,
3979 autoreadstderr=autoreadstderr,
3982 autoreadstderr=autoreadstderr,
3980 )
3983 )
3981 elif opts[b'peer'] == b'raw':
3984 elif opts[b'peer'] == b'raw':
3982 ui.write(_(b'using raw connection to peer\n'))
3985 ui.write(_(b'using raw connection to peer\n'))
3983 peer = None
3986 peer = None
3984 else:
3987 else:
3985 ui.write(_(b'creating ssh peer from handshake results\n'))
3988 ui.write(_(b'creating ssh peer from handshake results\n'))
3986 peer = sshpeer.makepeer(
3989 peer = sshpeer.makepeer(
3987 ui,
3990 ui,
3988 url,
3991 url,
3989 proc,
3992 proc,
3990 stdin,
3993 stdin,
3991 stdout,
3994 stdout,
3992 stderr,
3995 stderr,
3993 autoreadstderr=autoreadstderr,
3996 autoreadstderr=autoreadstderr,
3994 )
3997 )
3995
3998
3996 elif path:
3999 elif path:
3997 # We bypass hg.peer() so we can proxy the sockets.
4000 # We bypass hg.peer() so we can proxy the sockets.
3998 # TODO consider not doing this because we skip
4001 # TODO consider not doing this because we skip
3999 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4002 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4000 u = util.url(path)
4003 u = util.url(path)
4001 if u.scheme != b'http':
4004 if u.scheme != b'http':
4002 raise error.Abort(_(b'only http:// paths are currently supported'))
4005 raise error.Abort(_(b'only http:// paths are currently supported'))
4003
4006
4004 url, authinfo = u.authinfo()
4007 url, authinfo = u.authinfo()
4005 openerargs = {
4008 openerargs = {
4006 'useragent': b'Mercurial debugwireproto',
4009 'useragent': b'Mercurial debugwireproto',
4007 }
4010 }
4008
4011
4009 # Turn pipes/sockets into observers so we can log I/O.
4012 # Turn pipes/sockets into observers so we can log I/O.
4010 if ui.verbose:
4013 if ui.verbose:
4011 openerargs.update(
4014 openerargs.update(
4012 {
4015 {
4013 'loggingfh': ui,
4016 'loggingfh': ui,
4014 'loggingname': b's',
4017 'loggingname': b's',
4015 'loggingopts': {'logdata': True, 'logdataapis': False,},
4018 'loggingopts': {'logdata': True, 'logdataapis': False,},
4016 }
4019 }
4017 )
4020 )
4018
4021
4019 if ui.debugflag:
4022 if ui.debugflag:
4020 openerargs['loggingopts']['logdataapis'] = True
4023 openerargs['loggingopts']['logdataapis'] = True
4021
4024
4022 # Don't send default headers when in raw mode. This allows us to
4025 # Don't send default headers when in raw mode. This allows us to
4023 # bypass most of the behavior of our URL handling code so we can
4026 # bypass most of the behavior of our URL handling code so we can
4024 # have near complete control over what's sent on the wire.
4027 # have near complete control over what's sent on the wire.
4025 if opts[b'peer'] == b'raw':
4028 if opts[b'peer'] == b'raw':
4026 openerargs['sendaccept'] = False
4029 openerargs['sendaccept'] = False
4027
4030
4028 opener = urlmod.opener(ui, authinfo, **openerargs)
4031 opener = urlmod.opener(ui, authinfo, **openerargs)
4029
4032
4030 if opts[b'peer'] == b'http2':
4033 if opts[b'peer'] == b'http2':
4031 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4034 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4032 # We go through makepeer() because we need an API descriptor for
4035 # We go through makepeer() because we need an API descriptor for
4033 # the peer instance to be useful.
4036 # the peer instance to be useful.
4034 with ui.configoverride(
4037 with ui.configoverride(
4035 {(b'experimental', b'httppeer.advertise-v2'): True}
4038 {(b'experimental', b'httppeer.advertise-v2'): True}
4036 ):
4039 ):
4037 if opts[b'nologhandshake']:
4040 if opts[b'nologhandshake']:
4038 ui.pushbuffer()
4041 ui.pushbuffer()
4039
4042
4040 peer = httppeer.makepeer(ui, path, opener=opener)
4043 peer = httppeer.makepeer(ui, path, opener=opener)
4041
4044
4042 if opts[b'nologhandshake']:
4045 if opts[b'nologhandshake']:
4043 ui.popbuffer()
4046 ui.popbuffer()
4044
4047
4045 if not isinstance(peer, httppeer.httpv2peer):
4048 if not isinstance(peer, httppeer.httpv2peer):
4046 raise error.Abort(
4049 raise error.Abort(
4047 _(
4050 _(
4048 b'could not instantiate HTTP peer for '
4051 b'could not instantiate HTTP peer for '
4049 b'wire protocol version 2'
4052 b'wire protocol version 2'
4050 ),
4053 ),
4051 hint=_(
4054 hint=_(
4052 b'the server may not have the feature '
4055 b'the server may not have the feature '
4053 b'enabled or is not allowing this '
4056 b'enabled or is not allowing this '
4054 b'client version'
4057 b'client version'
4055 ),
4058 ),
4056 )
4059 )
4057
4060
4058 elif opts[b'peer'] == b'raw':
4061 elif opts[b'peer'] == b'raw':
4059 ui.write(_(b'using raw connection to peer\n'))
4062 ui.write(_(b'using raw connection to peer\n'))
4060 peer = None
4063 peer = None
4061 elif opts[b'peer']:
4064 elif opts[b'peer']:
4062 raise error.Abort(
4065 raise error.Abort(
4063 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4066 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4064 )
4067 )
4065 else:
4068 else:
4066 peer = httppeer.makepeer(ui, path, opener=opener)
4069 peer = httppeer.makepeer(ui, path, opener=opener)
4067
4070
4068 # We /could/ populate stdin/stdout with sock.makefile()...
4071 # We /could/ populate stdin/stdout with sock.makefile()...
4069 else:
4072 else:
4070 raise error.Abort(_(b'unsupported connection configuration'))
4073 raise error.Abort(_(b'unsupported connection configuration'))
4071
4074
4072 batchedcommands = None
4075 batchedcommands = None
4073
4076
4074 # Now perform actions based on the parsed wire language instructions.
4077 # Now perform actions based on the parsed wire language instructions.
4075 for action, lines in blocks:
4078 for action, lines in blocks:
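# Illustrative "raw" block as it could appear on stdin (a sketch; the
# payload shown is an example of the SSH protocol handshake, not taken
# from this file):
#
#   raw
#       hello\n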
4076 if action in (b'raw', b'raw+'):
4079 if action in (b'raw', b'raw+'):
4077 if not stdin:
4080 if not stdin:
4078 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4081 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4079
4082
4080 # Concatenate the data together.
4083 # Concatenate the data together.
4081 data = b''.join(l.lstrip() for l in lines)
4084 data = b''.join(l.lstrip() for l in lines)
4082 data = stringutil.unescapestr(data)
4085 data = stringutil.unescapestr(data)
4083 stdin.write(data)
4086 stdin.write(data)
4084
4087
4085 if action == b'raw+':
4088 if action == b'raw+':
4086 stdin.flush()
4089 stdin.flush()
4087 elif action == b'flush':
4090 elif action == b'flush':
4088 if not stdin:
4091 if not stdin:
4089 raise error.Abort(_(b'cannot call flush on this peer'))
4092 raise error.Abort(_(b'cannot call flush on this peer'))
4090 stdin.flush()
4093 stdin.flush()
4091 elif action.startswith(b'command'):
4094 elif action.startswith(b'command'):
4092 if not peer:
4095 if not peer:
4093 raise error.Abort(
4096 raise error.Abort(
4094 _(
4097 _(
4095 b'cannot send commands unless peer instance '
4098 b'cannot send commands unless peer instance '
4096 b'is available'
4099 b'is available'
4097 )
4100 )
4098 )
4101 )
4099
4102
4100 command = action.split(b' ', 1)[1]
4103 command = action.split(b' ', 1)[1]
4101
4104
4102 args = {}
4105 args = {}
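# Argument lines take the form "<key> <value>"; values prefixed with
# "eval:" are parsed as Python literals. Illustrative sketch (command
# and argument names are examples only):
#
#   command known
#       nodes eval:[b'0123456789abcdef']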
4103 for line in lines:
4106 for line in lines:
4104 # We need to allow empty values.
4107 # We need to allow empty values.
4105 fields = line.lstrip().split(b' ', 1)
4108 fields = line.lstrip().split(b' ', 1)
4106 if len(fields) == 1:
4109 if len(fields) == 1:
4107 key = fields[0]
4110 key = fields[0]
4108 value = b''
4111 value = b''
4109 else:
4112 else:
4110 key, value = fields
4113 key, value = fields
4111
4114
4112 if value.startswith(b'eval:'):
4115 if value.startswith(b'eval:'):
4113 value = stringutil.evalpythonliteral(value[5:])
4116 value = stringutil.evalpythonliteral(value[5:])
4114 else:
4117 else:
4115 value = stringutil.unescapestr(value)
4118 value = stringutil.unescapestr(value)
4116
4119
4117 args[key] = value
4120 args[key] = value
4118
4121
4119 if batchedcommands is not None:
4122 if batchedcommands is not None:
4120 batchedcommands.append((command, args))
4123 batchedcommands.append((command, args))
4121 continue
4124 continue
4122
4125
4123 ui.status(_(b'sending %s command\n') % command)
4126 ui.status(_(b'sending %s command\n') % command)
4124
4127
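# Illustrative sketch of a command block using the special PUSHFILE
# argument to stream a file as the command payload (the file name and
# argument value are hypothetical):
#
#   command unbundle
#       PUSHFILE ../initial.v1.hg
#       heads 666f726365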
4125 if b'PUSHFILE' in args:
4128 if b'PUSHFILE' in args:
4126 with open(args[b'PUSHFILE'], 'rb') as fh:
4129 with open(args[b'PUSHFILE'], 'rb') as fh:
4127 del args[b'PUSHFILE']
4130 del args[b'PUSHFILE']
4128 res, output = peer._callpush(
4131 res, output = peer._callpush(
4129 command, fh, **pycompat.strkwargs(args)
4132 command, fh, **pycompat.strkwargs(args)
4130 )
4133 )
4131 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4134 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4132 ui.status(
4135 ui.status(
4133 _(b'remote output: %s\n') % stringutil.escapestr(output)
4136 _(b'remote output: %s\n') % stringutil.escapestr(output)
4134 )
4137 )
4135 else:
4138 else:
4136 with peer.commandexecutor() as e:
4139 with peer.commandexecutor() as e:
4137 res = e.callcommand(command, args).result()
4140 res = e.callcommand(command, args).result()
4138
4141
4139 if isinstance(res, wireprotov2peer.commandresponse):
4142 if isinstance(res, wireprotov2peer.commandresponse):
4140 val = res.objects()
4143 val = res.objects()
4141 ui.status(
4144 ui.status(
4142 _(b'response: %s\n')
4145 _(b'response: %s\n')
4143 % stringutil.pprint(val, bprefix=True, indent=2)
4146 % stringutil.pprint(val, bprefix=True, indent=2)
4144 )
4147 )
4145 else:
4148 else:
4146 ui.status(
4149 ui.status(
4147 _(b'response: %s\n')
4150 _(b'response: %s\n')
4148 % stringutil.pprint(res, bprefix=True, indent=2)
4151 % stringutil.pprint(res, bprefix=True, indent=2)
4149 )
4152 )
4150
4153
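# Illustrative batching sketch in the wire language (command names are
# examples only):
#
#   batchbegin
#   command heads
#   command listkeys
#       namespace bookmarks
#   batchsubmit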
4151 elif action == b'batchbegin':
4154 elif action == b'batchbegin':
4152 if batchedcommands is not None:
4155 if batchedcommands is not None:
4153 raise error.Abort(_(b'nested batchbegin not allowed'))
4156 raise error.Abort(_(b'nested batchbegin not allowed'))
4154
4157
4155 batchedcommands = []
4158 batchedcommands = []
4156 elif action == b'batchsubmit':
4159 elif action == b'batchsubmit':
4157 # There is a batching API we could go through. But it would be
4160 # There is a batching API we could go through. But it would be
4158 # difficult to normalize requests into function calls. It is easier
4161 # difficult to normalize requests into function calls. It is easier
4159 # to bypass this layer and normalize to commands + args.
4162 # to bypass this layer and normalize to commands + args.
4160 ui.status(
4163 ui.status(
4161 _(b'sending batch with %d sub-commands\n')
4164 _(b'sending batch with %d sub-commands\n')
4162 % len(batchedcommands)
4165 % len(batchedcommands)
4163 )
4166 )
4164 assert peer is not None
4167 assert peer is not None
4165 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4168 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4166 ui.status(
4169 ui.status(
4167 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4170 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4168 )
4171 )
4169
4172
4170 batchedcommands = None
4173 batchedcommands = None
4171
4174
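# Illustrative httprequest block (the path and header values are
# assumptions for this sketch, not taken from the original source):
#
#   httprequest POST api/exp-http-v2-0003/ro/heads
#       accept: application/mercurial-exp-framing-0006
#       content-type: application/mercurial-exp-framing-0006
#       frame 1 1 stream-begin command-request new cbor:{b'name': b'heads'}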
4172 elif action.startswith(b'httprequest '):
4175 elif action.startswith(b'httprequest '):
4173 if not opener:
4176 if not opener:
4174 raise error.Abort(
4177 raise error.Abort(
4175 _(b'cannot use httprequest without an HTTP peer')
4178 _(b'cannot use httprequest without an HTTP peer')
4176 )
4179 )
4177
4180
4178 request = action.split(b' ', 2)
4181 request = action.split(b' ', 2)
4179 if len(request) != 3:
4182 if len(request) != 3:
4180 raise error.Abort(
4183 raise error.Abort(
4181 _(
4184 _(
4182 b'invalid httprequest: expected format is '
4185 b'invalid httprequest: expected format is '
4183 b'"httprequest <method> <path>'
4186 b'"httprequest <method> <path>'
4184 )
4187 )
4185 )
4188 )
4186
4189
4187 method, httppath = request[1:]
4190 method, httppath = request[1:]
4188 headers = {}
4191 headers = {}
4189 body = None
4192 body = None
4190 frames = []
4193 frames = []
4191 for line in lines:
4194 for line in lines:
4192 line = line.lstrip()
4195 line = line.lstrip()
4193 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4196 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4194 if m:
4197 if m:
4195 # Headers need to use native strings.
4198 # Headers need to use native strings.
4196 key = pycompat.strurl(m.group(1))
4199 key = pycompat.strurl(m.group(1))
4197 value = pycompat.strurl(m.group(2))
4200 value = pycompat.strurl(m.group(2))
4198 headers[key] = value
4201 headers[key] = value
4199 continue
4202 continue
4200
4203
4201 if line.startswith(b'BODYFILE '):
4204 if line.startswith(b'BODYFILE '):
4202 with open(line.split(b' ', 1)[1], b'rb') as fh:
4205 with open(line.split(b' ', 1)[1], b'rb') as fh:
4203 body = fh.read()
4206 body = fh.read()
4204 elif line.startswith(b'frame '):
4207 elif line.startswith(b'frame '):
4205 frame = wireprotoframing.makeframefromhumanstring(
4208 frame = wireprotoframing.makeframefromhumanstring(
4206 line[len(b'frame ') :]
4209 line[len(b'frame ') :]
4207 )
4210 )
4208
4211
4209 frames.append(frame)
4212 frames.append(frame)
4210 else:
4213 else:
4211 raise error.Abort(
4214 raise error.Abort(
4212 _(b'unknown argument to httprequest: %s') % line
4215 _(b'unknown argument to httprequest: %s') % line
4213 )
4216 )
4214
4217
4215 url = path + httppath
4218 url = path + httppath
4216
4219
4217 if frames:
4220 if frames:
4218 body = b''.join(bytes(f) for f in frames)
4221 body = b''.join(bytes(f) for f in frames)
4219
4222
4220 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4223 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4221
4224
4222 # urllib.Request insists on using has_data() as a proxy for
4225 # urllib.Request insists on using has_data() as a proxy for
4223 # determining the request method. Override that to use our
4226 # determining the request method. Override that to use our
4224 # explicitly requested method.
4227 # explicitly requested method.
4225 req.get_method = lambda: pycompat.sysstr(method)
4228 req.get_method = lambda: pycompat.sysstr(method)
4226
4229
4227 try:
4230 try:
4228 res = opener.open(req)
4231 res = opener.open(req)
4229 body = res.read()
4232 body = res.read()
4230 except util.urlerr.urlerror as e:
4233 except util.urlerr.urlerror as e:
4231 # read() method must be called, but only exists in Python 2
4234 # read() method must be called, but only exists in Python 2
4232 getattr(e, 'read', lambda: None)()
4235 getattr(e, 'read', lambda: None)()
4233 continue
4236 continue
4234
4237
4235 ct = res.headers.get('Content-Type')
4238 ct = res.headers.get('Content-Type')
4236 if ct == 'application/mercurial-cbor':
4239 if ct == 'application/mercurial-cbor':
4237 ui.write(
4240 ui.write(
4238 _(b'cbor> %s\n')
4241 _(b'cbor> %s\n')
4239 % stringutil.pprint(
4242 % stringutil.pprint(
4240 cborutil.decodeall(body), bprefix=True, indent=2
4243 cborutil.decodeall(body), bprefix=True, indent=2
4241 )
4244 )
4242 )
4245 )
4243
4246
4244 elif action == b'close':
4247 elif action == b'close':
4245 assert peer is not None
4248 assert peer is not None
4246 peer.close()
4249 peer.close()
4247 elif action == b'readavailable':
4250 elif action == b'readavailable':
4248 if not stdout or not stderr:
4251 if not stdout or not stderr:
4249 raise error.Abort(
4252 raise error.Abort(
4250 _(b'readavailable not available on this peer')
4253 _(b'readavailable not available on this peer')
4251 )
4254 )
4252
4255
4253 stdin.close()
4256 stdin.close()
4254 stdout.read()
4257 stdout.read()
4255 stderr.read()
4258 stderr.read()
4256
4259
4257 elif action == b'readline':
4260 elif action == b'readline':
4258 if not stdout:
4261 if not stdout:
4259 raise error.Abort(_(b'readline not available on this peer'))
4262 raise error.Abort(_(b'readline not available on this peer'))
4260 stdout.readline()
4263 stdout.readline()
4261 elif action == b'ereadline':
4264 elif action == b'ereadline':
4262 if not stderr:
4265 if not stderr:
4263 raise error.Abort(_(b'ereadline not available on this peer'))
4266 raise error.Abort(_(b'ereadline not available on this peer'))
4264 stderr.readline()
4267 stderr.readline()
4265 elif action.startswith(b'read '):
4268 elif action.startswith(b'read '):
4266 count = int(action.split(b' ', 1)[1])
4269 count = int(action.split(b' ', 1)[1])
4267 if not stdout:
4270 if not stdout:
4268 raise error.Abort(_(b'read not available on this peer'))
4271 raise error.Abort(_(b'read not available on this peer'))
4269 stdout.read(count)
4272 stdout.read(count)
4270 elif action.startswith(b'eread '):
4273 elif action.startswith(b'eread '):
4271 count = int(action.split(b' ', 1)[1])
4274 count = int(action.split(b' ', 1)[1])
4272 if not stderr:
4275 if not stderr:
4273 raise error.Abort(_(b'eread not available on this peer'))
4276 raise error.Abort(_(b'eread not available on this peer'))
4274 stderr.read(count)
4277 stderr.read(count)
4275 else:
4278 else:
4276 raise error.Abort(_(b'unknown action: %s') % action)
4279 raise error.Abort(_(b'unknown action: %s') % action)
4277
4280
4278 if batchedcommands is not None:
4281 if batchedcommands is not None:
4279 raise error.Abort(_(b'unclosed "batchbegin" request'))
4282 raise error.Abort(_(b'unclosed "batchbegin" request'))
4280
4283
4281 if peer:
4284 if peer:
4282 peer.close()
4285 peer.close()
4283
4286
4284 if proc:
4287 if proc:
4285 proc.kill()
4288 proc.kill()