tags: fix some type confusion exposed in python 3...
Augie Fackler
r44824:e80da7a6 default
@@ -1,4367 +1,4367 @@
# debugcommands.py - command processing for debug* commands
#
# Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import codecs
import collections
import difflib
import errno
import operator
import os
import platform
import random
import re
import socket
import ssl
import stat
import string
import subprocess
import sys
import time

from .i18n import _
from .node import (
    bin,
    hex,
    nullhex,
    nullid,
    nullrev,
    short,
)
from .pycompat import (
    getattr,
    open,
)
from . import (
    bundle2,
    changegroup,
    cmdutil,
    color,
    context,
    copies,
    dagparser,
    encoding,
    error,
    exchange,
    extensions,
    filemerge,
    filesetlang,
    formatter,
    hg,
    httppeer,
    localrepo,
    lock as lockmod,
    logcmdutil,
    merge as mergemod,
    obsolete,
    obsutil,
    pathutil,
    phases,
    policy,
    pvec,
    pycompat,
    registrar,
    repair,
    revlog,
    revset,
    revsetlang,
    scmutil,
    setdiscovery,
    simplemerge,
    sshpeer,
    sslutil,
    streamclone,
    tags as tagsmod,
    templater,
    treediscovery,
    upgrade,
    url as urlmod,
    util,
    vfs as vfsmod,
    wireprotoframing,
    wireprotoserver,
    wireprotov2peer,
)
from .utils import (
    cborutil,
    compression,
    dateutil,
    procutil,
    stringutil,
)

from .revlogutils import (
    deltas as deltautil,
    nodemap,
)

release = lockmod.release

command = registrar.command()


@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    if len(args) == 3:
        index, rev1, rev2 = args
        r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = r.lookup
    elif len(args) == 2:
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    a = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))

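# Illustrative usage (an assumption added for clarity, not taken from this
# changeset): with a plain repository, `hg debugancestor REV1 REV2` looks the
# two revisions up in the changelog and prints their common ancestor as
# "<rev>:<node hex>"; with three arguments the first one names a revlog index
# file to use instead of the changelog.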

@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    f = hg.openpath(ui, fname)
    gen = exchange.readbundle(ui, f, fname)
    gen.apply(repo)


@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """
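    # An illustrative DAG text (assumed for demonstration; it is not part of
    # this changeset).  Run against a fresh empty repository,
    #
    #     +3 :mytag <2 +2 /mytag
    #
    # creates three linear changesets, tags the last one "mytag", moves the
    # default parent two nodes back, adds two changesets on that side branch,
    # and merges its head with the changeset tagged "mytag".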

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1
        atbranch = b'default'
        nodeids = []
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))


def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))


def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        msg = b"%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = b"%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        fm = ui.formatter(b'debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()


def _debugphaseheads(ui, data, indent=0):
    """display version and markers contained in 'data'"""
    indent_string = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(indent_string)
            ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))


def _quasirepr(thing):
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        return b'{%s}' % (
            b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
        )
    return pycompat.bytestr(repr(thing))


def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)


@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            spec = exchange.getbundlespec(ui, f)
            ui.write(b'%s\n' % spec)
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)


@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    caps = peer.capabilities()
    ui.writenoi18n(b'Main capabilities:\n')
    for c in sorted(caps):
        ui.write(b'  %s\n' % c)
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.writenoi18n(b'Bundle2 capabilities:\n')
        for key, values in sorted(pycompat.iteritems(b2caps)):
            ui.write(b'  %s\n' % key)
            for v in values:
                ui.write(b'    %s\n' % v)


@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in b"nr" and f not in m1:
            ui.warn(_(b"%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in b"a" and f in m1:
            ui.warn(_(b"%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in b"m" and f not in m1 and f not in m2:
            ui.warn(
                _(b"%s in state %s, but not in either manifest\n") % (f, state)
            )
            errors += 1
    for f in m1:
        state = repo.dirstate[f]
        if state not in b"nrm":
            ui.warn(_(b"%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)

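# Note on the single-letter states checked above: these are the usual dirstate
# codes, 'n' (normal/tracked), 'a' (added), 'r' (removed) and 'm' (merged).
# Each check verifies that a state is consistent with the first and second
# parent manifests.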

@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    else:
        return _debugdisplaycolor(ui)


def _debugdisplaycolor(ui):
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for k, v in ui.configitems(b'color'):
            if k.startswith(b'color.'):
                ui._styles[k] = k[6:]
            elif k.startswith(b'terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_(b'available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(b'%s\n' % colorname, label=label)


def _debugdisplaystyle(ui):
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            # 50
            ui.write(b': ')
            ui.write(b' ' * (max(0, width - len(label))))
            ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
        ui.write(b'\n')


@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))


@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = set((int(r) for r in revs))

        def events():
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
        ui.write(b"\n")


@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(r.rawdata(r.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)


@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        d = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        d = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % d)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
    if range:
        m = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % m(d[0]))

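# Illustrative example (assumed, not part of this changeset): running
# `hg debugdate '2006-02-01 13:00:30 UTC'` prints the parsed value twice,
# first as the internal "<unixtime> <tzoffset>" pair from dateutil.parsedate()
# (the offset is 0 for UTC), then rendered back through dateutil.datestr().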

@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``:       revision number
    :``chainid``:   delta chain identifier (numbered by unique base)
    :``chainlen``:  delta chain length to this revision
    :``prevrev``:   previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``:  compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                    (new delta chains typically start at ratio 2.00)
    :``lindist``:   linear distance from base revision in delta chain to end
                    of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``:     total size of data read from the disk for a revision
                       (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``:  density of useful bytes in the data read from the disk
    :``srchunks``:     in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
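    # A sketch of how the keywords above are typically consumed (the exact
    # invocation is an assumption, not part of this changeset):
    #
    #     $ hg debugdeltachain -m -T '{rev} {chainid} {chainlen} {chainratio}\n'
    #
    # prints one line per manifest revision using the template keywords
    # documented in the docstring.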
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            if e[3] == e[5]:
                deltatype = b'p1'
            elif e[3] == e[6]:
                deltatype = b'p2'
            elif e[3] == rev - 1:
                deltatype = b'prev'
            elif e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'other'
        else:
            if e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b'    rev  chain# chainlen     prev   delta       '
        b'size    rawsize  chainsize     ratio   lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b'   readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()


@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:
        keyfunc = lambda x: (x[1][3], x[0])  # sort by mtime, then by filename
    else:
        keyfunc = None  # sort by filename
    for file_, ent in sorted(pycompat.iteritems(repo.dirstate), key=keyfunc):
        if ent[3] == -1:
            timestr = b'unset               '
        elif nodates:
            timestr = b'set                 '
        else:
            timestr = time.strftime(
                "%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
            )
            timestr = encoding.strtolocal(timestr)
        if ent[1] & 0o20000:
            mode = b'lnk'
        else:
            mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))

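# Each dirstate entry above is printed as "<state> <mode> <size> <mtime>
# <filename>"; regular files show their octal permission bits, symlinks show
# the literal mode 'lnk', and entries without a usable mtime show 'unset'.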

@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
    ]
    + cmdutil.remoteopts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    if opts.get(b'old'):

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(
                ui, repo, remote, ancestorsof=nodes
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']
    with util.timedcm('debug-discovery') as t:
        common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    common = set(common)
    rheads = set(hds)
    lheads = set(repo.heads())

    data = {}
    data[b'elapsed'] = t.elapsed
    data[b'nb-common'] = len(common)
    data[b'nb-common-local'] = len(common & lheads)
    data[b'nb-common-remote'] = len(common & rheads)
    data[b'nb-common-both'] = len(common & rheads & lheads)
    data[b'nb-local'] = len(lheads)
    data[b'nb-local-missing'] = data[b'nb-local'] - data[b'nb-common-local']
    data[b'nb-remote'] = len(rheads)
    data[b'nb-remote-unknown'] = data[b'nb-remote'] - data[b'nb-common-remote']
    data[b'nb-revs'] = len(repo.revs(b'all()'))
    data[b'nb-revs-common'] = len(repo.revs(b'::%ln', common))
    data[b'nb-revs-missing'] = data[b'nb-revs'] - data[b'nb-revs-common']

    # display discovery summary
    ui.writenoi18n(b"elapsed time:  %(elapsed)f seconds\n" % data)
    ui.writenoi18n(b"heads summary:\n")
    ui.writenoi18n(b"  total common heads:  %(nb-common)9d\n" % data)
    ui.writenoi18n(b"    also local heads:  %(nb-common-local)9d\n" % data)
    ui.writenoi18n(b"    also remote heads: %(nb-common-remote)9d\n" % data)
    ui.writenoi18n(b"    both:              %(nb-common-both)9d\n" % data)
    ui.writenoi18n(b"  local heads:         %(nb-local)9d\n" % data)
    ui.writenoi18n(b"    common:            %(nb-common-local)9d\n" % data)
    ui.writenoi18n(b"    missing:           %(nb-local-missing)9d\n" % data)
    ui.writenoi18n(b"  remote heads:        %(nb-remote)9d\n" % data)
    ui.writenoi18n(b"    common:            %(nb-common-remote)9d\n" % data)
    ui.writenoi18n(b"    unknown:           %(nb-remote-unknown)9d\n" % data)
    ui.writenoi18n(b"local changesets:      %(nb-revs)9d\n" % data)
    ui.writenoi18n(b"  common:              %(nb-revs-common)9d\n" % data)
    ui.writenoi18n(b"  missing:             %(nb-revs-missing)9d\n" % data)

    if ui.verbose:
        ui.writenoi18n(
            b"common heads: %s\n" % b" ".join(sorted(short(n) for n in common))
        )

1003
1003
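# Illustrative sketch (not in the original source) of the arithmetic behind the
# summary printed above: every count is a plain set operation on the discovered
# common nodes and the two head sets, using the names from the function above:
#
#   common & lheads              -> "also local heads"
#   lheads - common              -> "missing" under "local heads"
#   rheads - common              -> "unknown" under "remote heads"
#   repo.revs(b'::%ln', common)  -> changesets counted as "common"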
1004 _chunksize = 4 << 10
1004 _chunksize = 4 << 10
1005
1005
1006
1006
1007 @command(
1007 @command(
1008 b'debugdownload', [(b'o', b'output', b'', _(b'path')),], optionalrepo=True
1008 b'debugdownload', [(b'o', b'output', b'', _(b'path')),], optionalrepo=True
1009 )
1009 )
1010 def debugdownload(ui, repo, url, output=None, **opts):
1010 def debugdownload(ui, repo, url, output=None, **opts):
1011 """download a resource using Mercurial logic and config
1011 """download a resource using Mercurial logic and config
1012 """
1012 """
1013 fh = urlmod.open(ui, url, output)
1013 fh = urlmod.open(ui, url, output)
1014
1014
1015 dest = ui
1015 dest = ui
1016 if output:
1016 if output:
1017 dest = open(output, b"wb", _chunksize)
1017 dest = open(output, b"wb", _chunksize)
1018 try:
1018 try:
1019 data = fh.read(_chunksize)
1019 data = fh.read(_chunksize)
1020 while data:
1020 while data:
1021 dest.write(data)
1021 dest.write(data)
1022 data = fh.read(_chunksize)
1022 data = fh.read(_chunksize)
1023 finally:
1023 finally:
1024 if output:
1024 if output:
1025 dest.close()
1025 dest.close()
1026
1026
1027
1027
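# Hypothetical invocation of the command defined above: fetch a resource with
# Mercurial's configured url handling and write it out in _chunksize blocks.
# The URL and output name below are placeholders, not values from this file.
#
#   $ hg debugdownload https://example.com/resource.bin -o resource.bin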
1028 @command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
1028 @command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
1029 def debugextensions(ui, repo, **opts):
1029 def debugextensions(ui, repo, **opts):
1030 '''show information about active extensions'''
1030 '''show information about active extensions'''
1031 opts = pycompat.byteskwargs(opts)
1031 opts = pycompat.byteskwargs(opts)
1032 exts = extensions.extensions(ui)
1032 exts = extensions.extensions(ui)
1033 hgver = util.version()
1033 hgver = util.version()
1034 fm = ui.formatter(b'debugextensions', opts)
1034 fm = ui.formatter(b'debugextensions', opts)
1035 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
1035 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
1036 isinternal = extensions.ismoduleinternal(extmod)
1036 isinternal = extensions.ismoduleinternal(extmod)
1037 extsource = None
1037 extsource = None
1038
1038
1039 if util.safehasattr(extmod, '__file__'):
1039 if util.safehasattr(extmod, '__file__'):
1040 extsource = pycompat.fsencode(extmod.__file__)
1040 extsource = pycompat.fsencode(extmod.__file__)
1041 elif getattr(sys, 'oxidized', False):
1041 elif getattr(sys, 'oxidized', False):
1042 extsource = pycompat.sysexecutable
1042 extsource = pycompat.sysexecutable
1043 if isinternal:
1043 if isinternal:
1044 exttestedwith = [] # never expose magic string to users
1044 exttestedwith = [] # never expose magic string to users
1045 else:
1045 else:
1046 exttestedwith = getattr(extmod, 'testedwith', b'').split()
1046 exttestedwith = getattr(extmod, 'testedwith', b'').split()
1047 extbuglink = getattr(extmod, 'buglink', None)
1047 extbuglink = getattr(extmod, 'buglink', None)
1048
1048
1049 fm.startitem()
1049 fm.startitem()
1050
1050
1051 if ui.quiet or ui.verbose:
1051 if ui.quiet or ui.verbose:
1052 fm.write(b'name', b'%s\n', extname)
1052 fm.write(b'name', b'%s\n', extname)
1053 else:
1053 else:
1054 fm.write(b'name', b'%s', extname)
1054 fm.write(b'name', b'%s', extname)
1055 if isinternal or hgver in exttestedwith:
1055 if isinternal or hgver in exttestedwith:
1056 fm.plain(b'\n')
1056 fm.plain(b'\n')
1057 elif not exttestedwith:
1057 elif not exttestedwith:
1058 fm.plain(_(b' (untested!)\n'))
1058 fm.plain(_(b' (untested!)\n'))
1059 else:
1059 else:
1060 lasttestedversion = exttestedwith[-1]
1060 lasttestedversion = exttestedwith[-1]
1061 fm.plain(b' (%s!)\n' % lasttestedversion)
1061 fm.plain(b' (%s!)\n' % lasttestedversion)
1062
1062
1063 fm.condwrite(
1063 fm.condwrite(
1064 ui.verbose and extsource,
1064 ui.verbose and extsource,
1065 b'source',
1065 b'source',
1066 _(b' location: %s\n'),
1066 _(b' location: %s\n'),
1067 extsource or b"",
1067 extsource or b"",
1068 )
1068 )
1069
1069
1070 if ui.verbose:
1070 if ui.verbose:
1071 fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
1071 fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
1072 fm.data(bundled=isinternal)
1072 fm.data(bundled=isinternal)
1073
1073
1074 fm.condwrite(
1074 fm.condwrite(
1075 ui.verbose and exttestedwith,
1075 ui.verbose and exttestedwith,
1076 b'testedwith',
1076 b'testedwith',
1077 _(b' tested with: %s\n'),
1077 _(b' tested with: %s\n'),
1078 fm.formatlist(exttestedwith, name=b'ver'),
1078 fm.formatlist(exttestedwith, name=b'ver'),
1079 )
1079 )
1080
1080
1081 fm.condwrite(
1081 fm.condwrite(
1082 ui.verbose and extbuglink,
1082 ui.verbose and extbuglink,
1083 b'buglink',
1083 b'buglink',
1084 _(b' bug reporting: %s\n'),
1084 _(b' bug reporting: %s\n'),
1085 extbuglink or b"",
1085 extbuglink or b"",
1086 )
1086 )
1087
1087
1088 fm.end()
1088 fm.end()
1089
1089
1090
1090
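# Hypothetical invocation: since the command above is driven by a formatter,
# the standard -T/--template options from formatteropts should apply, e.g. to
# get the extension list as JSON (assumed, not shown in this file):
#
#   $ hg debugextensions -T json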
1091 @command(
1091 @command(
1092 b'debugfileset',
1092 b'debugfileset',
1093 [
1093 [
1094 (
1094 (
1095 b'r',
1095 b'r',
1096 b'rev',
1096 b'rev',
1097 b'',
1097 b'',
1098 _(b'apply the filespec on this revision'),
1098 _(b'apply the filespec on this revision'),
1099 _(b'REV'),
1099 _(b'REV'),
1100 ),
1100 ),
1101 (
1101 (
1102 b'',
1102 b'',
1103 b'all-files',
1103 b'all-files',
1104 False,
1104 False,
1105 _(b'test files from all revisions and working directory'),
1105 _(b'test files from all revisions and working directory'),
1106 ),
1106 ),
1107 (
1107 (
1108 b's',
1108 b's',
1109 b'show-matcher',
1109 b'show-matcher',
1110 None,
1110 None,
1111 _(b'print internal representation of matcher'),
1111 _(b'print internal representation of matcher'),
1112 ),
1112 ),
1113 (
1113 (
1114 b'p',
1114 b'p',
1115 b'show-stage',
1115 b'show-stage',
1116 [],
1116 [],
1117 _(b'print parsed tree at the given stage'),
1117 _(b'print parsed tree at the given stage'),
1118 _(b'NAME'),
1118 _(b'NAME'),
1119 ),
1119 ),
1120 ],
1120 ],
1121 _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
1121 _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
1122 )
1122 )
1123 def debugfileset(ui, repo, expr, **opts):
1123 def debugfileset(ui, repo, expr, **opts):
1124 '''parse and apply a fileset specification'''
1124 '''parse and apply a fileset specification'''
1125 from . import fileset
1125 from . import fileset
1126
1126
1127 fileset.symbols # force import of fileset so we have predicates to optimize
1127 fileset.symbols # force import of fileset so we have predicates to optimize
1128 opts = pycompat.byteskwargs(opts)
1128 opts = pycompat.byteskwargs(opts)
1129 ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)
1129 ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)
1130
1130
1131 stages = [
1131 stages = [
1132 (b'parsed', pycompat.identity),
1132 (b'parsed', pycompat.identity),
1133 (b'analyzed', filesetlang.analyze),
1133 (b'analyzed', filesetlang.analyze),
1134 (b'optimized', filesetlang.optimize),
1134 (b'optimized', filesetlang.optimize),
1135 ]
1135 ]
1136 stagenames = set(n for n, f in stages)
1136 stagenames = set(n for n, f in stages)
1137
1137
1138 showalways = set()
1138 showalways = set()
1139 if ui.verbose and not opts[b'show_stage']:
1139 if ui.verbose and not opts[b'show_stage']:
1140 # show parsed tree by --verbose (deprecated)
1140 # show parsed tree by --verbose (deprecated)
1141 showalways.add(b'parsed')
1141 showalways.add(b'parsed')
1142 if opts[b'show_stage'] == [b'all']:
1142 if opts[b'show_stage'] == [b'all']:
1143 showalways.update(stagenames)
1143 showalways.update(stagenames)
1144 else:
1144 else:
1145 for n in opts[b'show_stage']:
1145 for n in opts[b'show_stage']:
1146 if n not in stagenames:
1146 if n not in stagenames:
1147 raise error.Abort(_(b'invalid stage name: %s') % n)
1147 raise error.Abort(_(b'invalid stage name: %s') % n)
1148 showalways.update(opts[b'show_stage'])
1148 showalways.update(opts[b'show_stage'])
1149
1149
1150 tree = filesetlang.parse(expr)
1150 tree = filesetlang.parse(expr)
1151 for n, f in stages:
1151 for n, f in stages:
1152 tree = f(tree)
1152 tree = f(tree)
1153 if n in showalways:
1153 if n in showalways:
1154 if opts[b'show_stage'] or n != b'parsed':
1154 if opts[b'show_stage'] or n != b'parsed':
1155 ui.write(b"* %s:\n" % n)
1155 ui.write(b"* %s:\n" % n)
1156 ui.write(filesetlang.prettyformat(tree), b"\n")
1156 ui.write(filesetlang.prettyformat(tree), b"\n")
1157
1157
1158 files = set()
1158 files = set()
1159 if opts[b'all_files']:
1159 if opts[b'all_files']:
1160 for r in repo:
1160 for r in repo:
1161 c = repo[r]
1161 c = repo[r]
1162 files.update(c.files())
1162 files.update(c.files())
1163 files.update(c.substate)
1163 files.update(c.substate)
1164 if opts[b'all_files'] or ctx.rev() is None:
1164 if opts[b'all_files'] or ctx.rev() is None:
1165 wctx = repo[None]
1165 wctx = repo[None]
1166 files.update(
1166 files.update(
1167 repo.dirstate.walk(
1167 repo.dirstate.walk(
1168 scmutil.matchall(repo),
1168 scmutil.matchall(repo),
1169 subrepos=list(wctx.substate),
1169 subrepos=list(wctx.substate),
1170 unknown=True,
1170 unknown=True,
1171 ignored=True,
1171 ignored=True,
1172 )
1172 )
1173 )
1173 )
1174 files.update(wctx.substate)
1174 files.update(wctx.substate)
1175 else:
1175 else:
1176 files.update(ctx.files())
1176 files.update(ctx.files())
1177 files.update(ctx.substate)
1177 files.update(ctx.substate)
1178
1178
1179 m = ctx.matchfileset(repo.getcwd(), expr)
1179 m = ctx.matchfileset(repo.getcwd(), expr)
1180 if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
1180 if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
1181 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
1181 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
1182 for f in sorted(files):
1182 for f in sorted(files):
1183 if not m(f):
1183 if not m(f):
1184 continue
1184 continue
1185 ui.write(b"%s\n" % f)
1185 ui.write(b"%s\n" % f)
1186
1186
1187
1187
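# Hypothetical invocation of debugfileset: print every parse stage of a
# filespec and the files it matches at a revision. The stage name "all" comes
# from the show-stage handling above; the filespec is only an example:
#
#   $ hg debugfileset -r . -p all 'added() or modified()'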
1188 @command(b'debugformat', [] + cmdutil.formatteropts)
1188 @command(b'debugformat', [] + cmdutil.formatteropts)
1189 def debugformat(ui, repo, **opts):
1189 def debugformat(ui, repo, **opts):
1190 """display format information about the current repository
1190 """display format information about the current repository
1191
1191
1192 Use --verbose to get extra information about current config value and
1192 Use --verbose to get extra information about current config value and
1193 Mercurial default."""
1193 Mercurial default."""
1194 opts = pycompat.byteskwargs(opts)
1194 opts = pycompat.byteskwargs(opts)
1195 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1195 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1196 maxvariantlength = max(len(b'format-variant'), maxvariantlength)
1196 maxvariantlength = max(len(b'format-variant'), maxvariantlength)
1197
1197
1198 def makeformatname(name):
1198 def makeformatname(name):
1199 return b'%s:' + (b' ' * (maxvariantlength - len(name)))
1199 return b'%s:' + (b' ' * (maxvariantlength - len(name)))
1200
1200
1201 fm = ui.formatter(b'debugformat', opts)
1201 fm = ui.formatter(b'debugformat', opts)
1202 if fm.isplain():
1202 if fm.isplain():
1203
1203
1204 def formatvalue(value):
1204 def formatvalue(value):
1205 if util.safehasattr(value, b'startswith'):
1205 if util.safehasattr(value, b'startswith'):
1206 return value
1206 return value
1207 if value:
1207 if value:
1208 return b'yes'
1208 return b'yes'
1209 else:
1209 else:
1210 return b'no'
1210 return b'no'
1211
1211
1212 else:
1212 else:
1213 formatvalue = pycompat.identity
1213 formatvalue = pycompat.identity
1214
1214
1215 fm.plain(b'format-variant')
1215 fm.plain(b'format-variant')
1216 fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
1216 fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
1217 fm.plain(b' repo')
1217 fm.plain(b' repo')
1218 if ui.verbose:
1218 if ui.verbose:
1219 fm.plain(b' config default')
1219 fm.plain(b' config default')
1220 fm.plain(b'\n')
1220 fm.plain(b'\n')
1221 for fv in upgrade.allformatvariant:
1221 for fv in upgrade.allformatvariant:
1222 fm.startitem()
1222 fm.startitem()
1223 repovalue = fv.fromrepo(repo)
1223 repovalue = fv.fromrepo(repo)
1224 configvalue = fv.fromconfig(repo)
1224 configvalue = fv.fromconfig(repo)
1225
1225
1226 if repovalue != configvalue:
1226 if repovalue != configvalue:
1227 namelabel = b'formatvariant.name.mismatchconfig'
1227 namelabel = b'formatvariant.name.mismatchconfig'
1228 repolabel = b'formatvariant.repo.mismatchconfig'
1228 repolabel = b'formatvariant.repo.mismatchconfig'
1229 elif repovalue != fv.default:
1229 elif repovalue != fv.default:
1230 namelabel = b'formatvariant.name.mismatchdefault'
1230 namelabel = b'formatvariant.name.mismatchdefault'
1231 repolabel = b'formatvariant.repo.mismatchdefault'
1231 repolabel = b'formatvariant.repo.mismatchdefault'
1232 else:
1232 else:
1233 namelabel = b'formatvariant.name.uptodate'
1233 namelabel = b'formatvariant.name.uptodate'
1234 repolabel = b'formatvariant.repo.uptodate'
1234 repolabel = b'formatvariant.repo.uptodate'
1235
1235
1236 fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
1236 fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
1237 fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
1237 fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
1238 if fv.default != configvalue:
1238 if fv.default != configvalue:
1239 configlabel = b'formatvariant.config.special'
1239 configlabel = b'formatvariant.config.special'
1240 else:
1240 else:
1241 configlabel = b'formatvariant.config.default'
1241 configlabel = b'formatvariant.config.default'
1242 fm.condwrite(
1242 fm.condwrite(
1243 ui.verbose,
1243 ui.verbose,
1244 b'config',
1244 b'config',
1245 b' %6s',
1245 b' %6s',
1246 formatvalue(configvalue),
1246 formatvalue(configvalue),
1247 label=configlabel,
1247 label=configlabel,
1248 )
1248 )
1249 fm.condwrite(
1249 fm.condwrite(
1250 ui.verbose,
1250 ui.verbose,
1251 b'default',
1251 b'default',
1252 b' %7s',
1252 b' %7s',
1253 formatvalue(fv.default),
1253 formatvalue(fv.default),
1254 label=b'formatvariant.default',
1254 label=b'formatvariant.default',
1255 )
1255 )
1256 fm.plain(b'\n')
1256 fm.plain(b'\n')
1257 fm.end()
1257 fm.end()
1258
1258
1259
1259
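# Hypothetical invocation: list each format variant with the repository value;
# adding -v also prints the config and default columns written by the
# condwrite calls above:
#
#   $ hg debugformat -v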
1260 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1260 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1261 def debugfsinfo(ui, path=b"."):
1261 def debugfsinfo(ui, path=b"."):
1262 """show information detected about current filesystem"""
1262 """show information detected about current filesystem"""
1263 ui.writenoi18n(b'path: %s\n' % path)
1263 ui.writenoi18n(b'path: %s\n' % path)
1264 ui.writenoi18n(
1264 ui.writenoi18n(
1265 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1265 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1266 )
1266 )
1267 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1267 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1268 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1268 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1269 ui.writenoi18n(
1269 ui.writenoi18n(
1270 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1270 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1271 )
1271 )
1272 ui.writenoi18n(
1272 ui.writenoi18n(
1273 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1273 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1274 )
1274 )
1275 casesensitive = b'(unknown)'
1275 casesensitive = b'(unknown)'
1276 try:
1276 try:
1277 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1277 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1278 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1278 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1279 except OSError:
1279 except OSError:
1280 pass
1280 pass
1281 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1281 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1282
1282
1283
1283
1284 @command(
1284 @command(
1285 b'debuggetbundle',
1285 b'debuggetbundle',
1286 [
1286 [
1287 (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
1287 (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
1288 (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
1288 (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
1289 (
1289 (
1290 b't',
1290 b't',
1291 b'type',
1291 b'type',
1292 b'bzip2',
1292 b'bzip2',
1293 _(b'bundle compression type to use'),
1293 _(b'bundle compression type to use'),
1294 _(b'TYPE'),
1294 _(b'TYPE'),
1295 ),
1295 ),
1296 ],
1296 ],
1297 _(b'REPO FILE [-H|-C ID]...'),
1297 _(b'REPO FILE [-H|-C ID]...'),
1298 norepo=True,
1298 norepo=True,
1299 )
1299 )
1300 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1300 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1301 """retrieves a bundle from a repo
1301 """retrieves a bundle from a repo
1302
1302
1303 Every ID must be a full-length hex node id string. Saves the bundle to the
1303 Every ID must be a full-length hex node id string. Saves the bundle to the
1304 given file.
1304 given file.
1305 """
1305 """
1306 opts = pycompat.byteskwargs(opts)
1306 opts = pycompat.byteskwargs(opts)
1307 repo = hg.peer(ui, opts, repopath)
1307 repo = hg.peer(ui, opts, repopath)
1308 if not repo.capable(b'getbundle'):
1308 if not repo.capable(b'getbundle'):
1309 raise error.Abort(b"getbundle() not supported by target repository")
1309 raise error.Abort(b"getbundle() not supported by target repository")
1310 args = {}
1310 args = {}
1311 if common:
1311 if common:
1312 args['common'] = [bin(s) for s in common]
1312 args['common'] = [bin(s) for s in common]
1313 if head:
1313 if head:
1314 args['heads'] = [bin(s) for s in head]
1314 args['heads'] = [bin(s) for s in head]
1315 # TODO: get desired bundlecaps from command line.
1315 # TODO: get desired bundlecaps from command line.
1316 args['bundlecaps'] = None
1316 args['bundlecaps'] = None
1317 bundle = repo.getbundle(b'debug', **args)
1317 bundle = repo.getbundle(b'debug', **args)
1318
1318
1319 bundletype = opts.get(b'type', b'bzip2').lower()
1319 bundletype = opts.get(b'type', b'bzip2').lower()
1320 btypes = {
1320 btypes = {
1321 b'none': b'HG10UN',
1321 b'none': b'HG10UN',
1322 b'bzip2': b'HG10BZ',
1322 b'bzip2': b'HG10BZ',
1323 b'gzip': b'HG10GZ',
1323 b'gzip': b'HG10GZ',
1324 b'bundle2': b'HG20',
1324 b'bundle2': b'HG20',
1325 }
1325 }
1326 bundletype = btypes.get(bundletype)
1326 bundletype = btypes.get(bundletype)
1327 if bundletype not in bundle2.bundletypes:
1327 if bundletype not in bundle2.bundletypes:
1328 raise error.Abort(_(b'unknown bundle type specified with --type'))
1328 raise error.Abort(_(b'unknown bundle type specified with --type'))
1329 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1329 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1330
1330
1331
1331
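# Hypothetical invocation: save a bundle of everything reachable from a given
# head (a full 40-hex node id, shown here only as a placeholder) from another
# repo, using one of the --type values mapped in `btypes` above:
#
#   $ hg debuggetbundle ../other-repo out.hg -H <full-hex-node> -t bundle2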
1332 @command(b'debugignore', [], b'[FILE]')
1332 @command(b'debugignore', [], b'[FILE]')
1333 def debugignore(ui, repo, *files, **opts):
1333 def debugignore(ui, repo, *files, **opts):
1334 """display the combined ignore pattern and information about ignored files
1334 """display the combined ignore pattern and information about ignored files
1335
1335
1336 With no argument display the combined ignore pattern.
1336 With no argument display the combined ignore pattern.
1337
1337
1338 Given space separated file names, shows if the given file is ignored and
1338 Given space separated file names, shows if the given file is ignored and
1339 if so, shows the ignore rule (file and line number) that matched it.
1339 if so, shows the ignore rule (file and line number) that matched it.
1340 """
1340 """
1341 ignore = repo.dirstate._ignore
1341 ignore = repo.dirstate._ignore
1342 if not files:
1342 if not files:
1343 # Show all the patterns
1343 # Show all the patterns
1344 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1344 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1345 else:
1345 else:
1346 m = scmutil.match(repo[None], pats=files)
1346 m = scmutil.match(repo[None], pats=files)
1347 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1347 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1348 for f in m.files():
1348 for f in m.files():
1349 nf = util.normpath(f)
1349 nf = util.normpath(f)
1350 ignored = None
1350 ignored = None
1351 ignoredata = None
1351 ignoredata = None
1352 if nf != b'.':
1352 if nf != b'.':
1353 if ignore(nf):
1353 if ignore(nf):
1354 ignored = nf
1354 ignored = nf
1355 ignoredata = repo.dirstate._ignorefileandline(nf)
1355 ignoredata = repo.dirstate._ignorefileandline(nf)
1356 else:
1356 else:
1357 for p in pathutil.finddirs(nf):
1357 for p in pathutil.finddirs(nf):
1358 if ignore(p):
1358 if ignore(p):
1359 ignored = p
1359 ignored = p
1360 ignoredata = repo.dirstate._ignorefileandline(p)
1360 ignoredata = repo.dirstate._ignorefileandline(p)
1361 break
1361 break
1362 if ignored:
1362 if ignored:
1363 if ignored == nf:
1363 if ignored == nf:
1364 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1364 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1365 else:
1365 else:
1366 ui.write(
1366 ui.write(
1367 _(
1367 _(
1368 b"%s is ignored because of "
1368 b"%s is ignored because of "
1369 b"containing directory %s\n"
1369 b"containing directory %s\n"
1370 )
1370 )
1371 % (uipathfn(f), ignored)
1371 % (uipathfn(f), ignored)
1372 )
1372 )
1373 ignorefile, lineno, line = ignoredata
1373 ignorefile, lineno, line = ignoredata
1374 ui.write(
1374 ui.write(
1375 _(b"(ignore rule in %s, line %d: '%s')\n")
1375 _(b"(ignore rule in %s, line %d: '%s')\n")
1376 % (ignorefile, lineno, line)
1376 % (ignorefile, lineno, line)
1377 )
1377 )
1378 else:
1378 else:
1379 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1379 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1380
1380
1381
1381
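# Hypothetical invocations: with no argument the combined ignore matcher is
# dumped; with file names the matching rule (file and line number) is
# reported. The path below is a placeholder:
#
#   $ hg debugignore
#   $ hg debugignore build/output.o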
1382 @command(
1382 @command(
1383 b'debugindex',
1383 b'debugindex',
1384 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1384 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1385 _(b'-c|-m|FILE'),
1385 _(b'-c|-m|FILE'),
1386 )
1386 )
1387 def debugindex(ui, repo, file_=None, **opts):
1387 def debugindex(ui, repo, file_=None, **opts):
1388 """dump index data for a storage primitive"""
1388 """dump index data for a storage primitive"""
1389 opts = pycompat.byteskwargs(opts)
1389 opts = pycompat.byteskwargs(opts)
1390 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1390 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1391
1391
1392 if ui.debugflag:
1392 if ui.debugflag:
1393 shortfn = hex
1393 shortfn = hex
1394 else:
1394 else:
1395 shortfn = short
1395 shortfn = short
1396
1396
1397 idlen = 12
1397 idlen = 12
1398 for i in store:
1398 for i in store:
1399 idlen = len(shortfn(store.node(i)))
1399 idlen = len(shortfn(store.node(i)))
1400 break
1400 break
1401
1401
1402 fm = ui.formatter(b'debugindex', opts)
1402 fm = ui.formatter(b'debugindex', opts)
1403 fm.plain(
1403 fm.plain(
1404 b' rev linkrev %s %s p2\n'
1404 b' rev linkrev %s %s p2\n'
1405 % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
1405 % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
1406 )
1406 )
1407
1407
1408 for rev in store:
1408 for rev in store:
1409 node = store.node(rev)
1409 node = store.node(rev)
1410 parents = store.parents(node)
1410 parents = store.parents(node)
1411
1411
1412 fm.startitem()
1412 fm.startitem()
1413 fm.write(b'rev', b'%6d ', rev)
1413 fm.write(b'rev', b'%6d ', rev)
1414 fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
1414 fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
1415 fm.write(b'node', b'%s ', shortfn(node))
1415 fm.write(b'node', b'%s ', shortfn(node))
1416 fm.write(b'p1', b'%s ', shortfn(parents[0]))
1416 fm.write(b'p1', b'%s ', shortfn(parents[0]))
1417 fm.write(b'p2', b'%s', shortfn(parents[1]))
1417 fm.write(b'p2', b'%s', shortfn(parents[1]))
1418 fm.plain(b'\n')
1418 fm.plain(b'\n')
1419
1419
1420 fm.end()
1420 fm.end()
1421
1421
1422
1422
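# Hypothetical invocation: dump the index of the manifest revlog; per the
# synopsis above, -c (changelog) or an explicit FILE work the same way:
#
#   $ hg debugindex -m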
1423 @command(
1423 @command(
1424 b'debugindexdot',
1424 b'debugindexdot',
1425 cmdutil.debugrevlogopts,
1425 cmdutil.debugrevlogopts,
1426 _(b'-c|-m|FILE'),
1426 _(b'-c|-m|FILE'),
1427 optionalrepo=True,
1427 optionalrepo=True,
1428 )
1428 )
1429 def debugindexdot(ui, repo, file_=None, **opts):
1429 def debugindexdot(ui, repo, file_=None, **opts):
1430 """dump an index DAG as a graphviz dot file"""
1430 """dump an index DAG as a graphviz dot file"""
1431 opts = pycompat.byteskwargs(opts)
1431 opts = pycompat.byteskwargs(opts)
1432 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1432 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1433 ui.writenoi18n(b"digraph G {\n")
1433 ui.writenoi18n(b"digraph G {\n")
1434 for i in r:
1434 for i in r:
1435 node = r.node(i)
1435 node = r.node(i)
1436 pp = r.parents(node)
1436 pp = r.parents(node)
1437 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1437 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1438 if pp[1] != nullid:
1438 if pp[1] != nullid:
1439 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1439 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1440 ui.write(b"}\n")
1440 ui.write(b"}\n")
1441
1441
1442
1442
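# Hypothetical pipeline: the command emits a graphviz "digraph", so its output
# can be rendered with the external `dot` tool (not part of Mercurial):
#
#   $ hg debugindexdot -c | dot -Tpng > dag.png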
1443 @command(b'debugindexstats', [])
1443 @command(b'debugindexstats', [])
1444 def debugindexstats(ui, repo):
1444 def debugindexstats(ui, repo):
1445 """show stats related to the changelog index"""
1445 """show stats related to the changelog index"""
1446 repo.changelog.shortest(nullid, 1)
1446 repo.changelog.shortest(nullid, 1)
1447 index = repo.changelog.index
1447 index = repo.changelog.index
1448 if not util.safehasattr(index, b'stats'):
1448 if not util.safehasattr(index, b'stats'):
1449 raise error.Abort(_(b'debugindexstats only works with native code'))
1449 raise error.Abort(_(b'debugindexstats only works with native code'))
1450 for k, v in sorted(index.stats().items()):
1450 for k, v in sorted(index.stats().items()):
1451 ui.write(b'%s: %d\n' % (k, v))
1451 ui.write(b'%s: %d\n' % (k, v))
1452
1452
1453
1453
1454 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1454 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1455 def debuginstall(ui, **opts):
1455 def debuginstall(ui, **opts):
1456 '''test Mercurial installation
1456 '''test Mercurial installation
1457
1457
1458 Returns 0 on success.
1458 Returns 0 on success.
1459 '''
1459 '''
1460 opts = pycompat.byteskwargs(opts)
1460 opts = pycompat.byteskwargs(opts)
1461
1461
1462 problems = 0
1462 problems = 0
1463
1463
1464 fm = ui.formatter(b'debuginstall', opts)
1464 fm = ui.formatter(b'debuginstall', opts)
1465 fm.startitem()
1465 fm.startitem()
1466
1466
1467 # encoding
1467 # encoding
1468 fm.write(b'encoding', _(b"checking encoding (%s)...\n"), encoding.encoding)
1468 fm.write(b'encoding', _(b"checking encoding (%s)...\n"), encoding.encoding)
1469 err = None
1469 err = None
1470 try:
1470 try:
1471 codecs.lookup(pycompat.sysstr(encoding.encoding))
1471 codecs.lookup(pycompat.sysstr(encoding.encoding))
1472 except LookupError as inst:
1472 except LookupError as inst:
1473 err = stringutil.forcebytestr(inst)
1473 err = stringutil.forcebytestr(inst)
1474 problems += 1
1474 problems += 1
1475 fm.condwrite(
1475 fm.condwrite(
1476 err,
1476 err,
1477 b'encodingerror',
1477 b'encodingerror',
1478 _(b" %s\n (check that your locale is properly set)\n"),
1478 _(b" %s\n (check that your locale is properly set)\n"),
1479 err,
1479 err,
1480 )
1480 )
1481
1481
1482 # Python
1482 # Python
1483 pythonlib = None
1483 pythonlib = None
1484 if util.safehasattr(os, '__file__'):
1484 if util.safehasattr(os, '__file__'):
1485 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1485 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1486 elif getattr(sys, 'oxidized', False):
1486 elif getattr(sys, 'oxidized', False):
1487 pythonlib = pycompat.sysexecutable
1487 pythonlib = pycompat.sysexecutable
1488
1488
1489 fm.write(
1489 fm.write(
1490 b'pythonexe',
1490 b'pythonexe',
1491 _(b"checking Python executable (%s)\n"),
1491 _(b"checking Python executable (%s)\n"),
1492 pycompat.sysexecutable or _(b"unknown"),
1492 pycompat.sysexecutable or _(b"unknown"),
1493 )
1493 )
1494 fm.write(
1494 fm.write(
1495 b'pythonimplementation',
1495 b'pythonimplementation',
1496 _(b"checking Python implementation (%s)\n"),
1496 _(b"checking Python implementation (%s)\n"),
1497 pycompat.sysbytes(platform.python_implementation()),
1497 pycompat.sysbytes(platform.python_implementation()),
1498 )
1498 )
1499 fm.write(
1499 fm.write(
1500 b'pythonver',
1500 b'pythonver',
1501 _(b"checking Python version (%s)\n"),
1501 _(b"checking Python version (%s)\n"),
1502 (b"%d.%d.%d" % sys.version_info[:3]),
1502 (b"%d.%d.%d" % sys.version_info[:3]),
1503 )
1503 )
1504 fm.write(
1504 fm.write(
1505 b'pythonlib',
1505 b'pythonlib',
1506 _(b"checking Python lib (%s)...\n"),
1506 _(b"checking Python lib (%s)...\n"),
1507 pythonlib or _(b"unknown"),
1507 pythonlib or _(b"unknown"),
1508 )
1508 )
1509
1509
1510 security = set(sslutil.supportedprotocols)
1510 security = set(sslutil.supportedprotocols)
1511 if sslutil.hassni:
1511 if sslutil.hassni:
1512 security.add(b'sni')
1512 security.add(b'sni')
1513
1513
1514 fm.write(
1514 fm.write(
1515 b'pythonsecurity',
1515 b'pythonsecurity',
1516 _(b"checking Python security support (%s)\n"),
1516 _(b"checking Python security support (%s)\n"),
1517 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1517 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1518 )
1518 )
1519
1519
1520 # These are warnings, not errors. So don't increment problem count. This
1520 # These are warnings, not errors. So don't increment problem count. This
1521 # may change in the future.
1521 # may change in the future.
1522 if b'tls1.2' not in security:
1522 if b'tls1.2' not in security:
1523 fm.plain(
1523 fm.plain(
1524 _(
1524 _(
1525 b' TLS 1.2 not supported by Python install; '
1525 b' TLS 1.2 not supported by Python install; '
1526 b'network connections lack modern security\n'
1526 b'network connections lack modern security\n'
1527 )
1527 )
1528 )
1528 )
1529 if b'sni' not in security:
1529 if b'sni' not in security:
1530 fm.plain(
1530 fm.plain(
1531 _(
1531 _(
1532 b' SNI not supported by Python install; may have '
1532 b' SNI not supported by Python install; may have '
1533 b'connectivity issues with some servers\n'
1533 b'connectivity issues with some servers\n'
1534 )
1534 )
1535 )
1535 )
1536
1536
1537 # TODO print CA cert info
1537 # TODO print CA cert info
1538
1538
1539 # hg version
1539 # hg version
1540 hgver = util.version()
1540 hgver = util.version()
1541 fm.write(
1541 fm.write(
1542 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1542 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1543 )
1543 )
1544 fm.write(
1544 fm.write(
1545 b'hgverextra',
1545 b'hgverextra',
1546 _(b"checking Mercurial custom build (%s)\n"),
1546 _(b"checking Mercurial custom build (%s)\n"),
1547 b'+'.join(hgver.split(b'+')[1:]),
1547 b'+'.join(hgver.split(b'+')[1:]),
1548 )
1548 )
1549
1549
1550 # compiled modules
1550 # compiled modules
1551 hgmodules = None
1551 hgmodules = None
1552 if util.safehasattr(sys.modules[__name__], '__file__'):
1552 if util.safehasattr(sys.modules[__name__], '__file__'):
1553 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1553 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1554 elif getattr(sys, 'oxidized', False):
1554 elif getattr(sys, 'oxidized', False):
1555 hgmodules = pycompat.sysexecutable
1555 hgmodules = pycompat.sysexecutable
1556
1556
1557 fm.write(
1557 fm.write(
1558 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1558 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1559 )
1559 )
1560 fm.write(
1560 fm.write(
1561 b'hgmodules',
1561 b'hgmodules',
1562 _(b"checking installed modules (%s)...\n"),
1562 _(b"checking installed modules (%s)...\n"),
1563 hgmodules or _(b"unknown"),
1563 hgmodules or _(b"unknown"),
1564 )
1564 )
1565
1565
1566 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1566 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1567 rustext = rustandc # for now, that's the only case
1567 rustext = rustandc # for now, that's the only case
1568 cext = policy.policy in (b'c', b'allow') or rustandc
1568 cext = policy.policy in (b'c', b'allow') or rustandc
1569 nopure = cext or rustext
1569 nopure = cext or rustext
1570 if nopure:
1570 if nopure:
1571 err = None
1571 err = None
1572 try:
1572 try:
1573 if cext:
1573 if cext:
1574 from .cext import ( # pytype: disable=import-error
1574 from .cext import ( # pytype: disable=import-error
1575 base85,
1575 base85,
1576 bdiff,
1576 bdiff,
1577 mpatch,
1577 mpatch,
1578 osutil,
1578 osutil,
1579 )
1579 )
1580
1580
1581 # quiet pyflakes
1581 # quiet pyflakes
1582 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1582 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1583 if rustext:
1583 if rustext:
1584 from .rustext import ( # pytype: disable=import-error
1584 from .rustext import ( # pytype: disable=import-error
1585 ancestor,
1585 ancestor,
1586 dirstate,
1586 dirstate,
1587 )
1587 )
1588
1588
1589 dir(ancestor), dir(dirstate) # quiet pyflakes
1589 dir(ancestor), dir(dirstate) # quiet pyflakes
1590 except Exception as inst:
1590 except Exception as inst:
1591 err = stringutil.forcebytestr(inst)
1591 err = stringutil.forcebytestr(inst)
1592 problems += 1
1592 problems += 1
1593 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1593 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1594
1594
1595 compengines = util.compengines._engines.values()
1595 compengines = util.compengines._engines.values()
1596 fm.write(
1596 fm.write(
1597 b'compengines',
1597 b'compengines',
1598 _(b'checking registered compression engines (%s)\n'),
1598 _(b'checking registered compression engines (%s)\n'),
1599 fm.formatlist(
1599 fm.formatlist(
1600 sorted(e.name() for e in compengines),
1600 sorted(e.name() for e in compengines),
1601 name=b'compengine',
1601 name=b'compengine',
1602 fmt=b'%s',
1602 fmt=b'%s',
1603 sep=b', ',
1603 sep=b', ',
1604 ),
1604 ),
1605 )
1605 )
1606 fm.write(
1606 fm.write(
1607 b'compenginesavail',
1607 b'compenginesavail',
1608 _(b'checking available compression engines (%s)\n'),
1608 _(b'checking available compression engines (%s)\n'),
1609 fm.formatlist(
1609 fm.formatlist(
1610 sorted(e.name() for e in compengines if e.available()),
1610 sorted(e.name() for e in compengines if e.available()),
1611 name=b'compengine',
1611 name=b'compengine',
1612 fmt=b'%s',
1612 fmt=b'%s',
1613 sep=b', ',
1613 sep=b', ',
1614 ),
1614 ),
1615 )
1615 )
1616 wirecompengines = compression.compengines.supportedwireengines(
1616 wirecompengines = compression.compengines.supportedwireengines(
1617 compression.SERVERROLE
1617 compression.SERVERROLE
1618 )
1618 )
1619 fm.write(
1619 fm.write(
1620 b'compenginesserver',
1620 b'compenginesserver',
1621 _(
1621 _(
1622 b'checking available compression engines '
1622 b'checking available compression engines '
1623 b'for wire protocol (%s)\n'
1623 b'for wire protocol (%s)\n'
1624 ),
1624 ),
1625 fm.formatlist(
1625 fm.formatlist(
1626 [e.name() for e in wirecompengines if e.wireprotosupport()],
1626 [e.name() for e in wirecompengines if e.wireprotosupport()],
1627 name=b'compengine',
1627 name=b'compengine',
1628 fmt=b'%s',
1628 fmt=b'%s',
1629 sep=b', ',
1629 sep=b', ',
1630 ),
1630 ),
1631 )
1631 )
1632 re2 = b'missing'
1632 re2 = b'missing'
1633 if util._re2:
1633 if util._re2:
1634 re2 = b'available'
1634 re2 = b'available'
1635 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1635 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1636 fm.data(re2=bool(util._re2))
1636 fm.data(re2=bool(util._re2))
1637
1637
1638 # templates
1638 # templates
1639 p = templater.templatepaths()
1639 p = templater.templatepaths()
1640 fm.write(b'templatedirs', b'checking templates (%s)...\n', b' '.join(p))
1640 fm.write(b'templatedirs', b'checking templates (%s)...\n', b' '.join(p))
1641 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1641 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1642 if p:
1642 if p:
1643 m = templater.templatepath(b"map-cmdline.default")
1643 m = templater.templatepath(b"map-cmdline.default")
1644 if m:
1644 if m:
1645 # template found, check if it is working
1645 # template found, check if it is working
1646 err = None
1646 err = None
1647 try:
1647 try:
1648 templater.templater.frommapfile(m)
1648 templater.templater.frommapfile(m)
1649 except Exception as inst:
1649 except Exception as inst:
1650 err = stringutil.forcebytestr(inst)
1650 err = stringutil.forcebytestr(inst)
1651 p = None
1651 p = None
1652 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1652 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1653 else:
1653 else:
1654 p = None
1654 p = None
1655 fm.condwrite(
1655 fm.condwrite(
1656 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1656 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1657 )
1657 )
1658 fm.condwrite(
1658 fm.condwrite(
1659 not m,
1659 not m,
1660 b'defaulttemplatenotfound',
1660 b'defaulttemplatenotfound',
1661 _(b" template '%s' not found\n"),
1661 _(b" template '%s' not found\n"),
1662 b"default",
1662 b"default",
1663 )
1663 )
1664 if not p:
1664 if not p:
1665 problems += 1
1665 problems += 1
1666 fm.condwrite(
1666 fm.condwrite(
1667 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
1667 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
1668 )
1668 )
1669
1669
1670 # editor
1670 # editor
1671 editor = ui.geteditor()
1671 editor = ui.geteditor()
1672 editor = util.expandpath(editor)
1672 editor = util.expandpath(editor)
1673 editorbin = procutil.shellsplit(editor)[0]
1673 editorbin = procutil.shellsplit(editor)[0]
1674 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
1674 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
1675 cmdpath = procutil.findexe(editorbin)
1675 cmdpath = procutil.findexe(editorbin)
1676 fm.condwrite(
1676 fm.condwrite(
1677 not cmdpath and editor == b'vi',
1677 not cmdpath and editor == b'vi',
1678 b'vinotfound',
1678 b'vinotfound',
1679 _(
1679 _(
1680 b" No commit editor set and can't find %s in PATH\n"
1680 b" No commit editor set and can't find %s in PATH\n"
1681 b" (specify a commit editor in your configuration"
1681 b" (specify a commit editor in your configuration"
1682 b" file)\n"
1682 b" file)\n"
1683 ),
1683 ),
1684 not cmdpath and editor == b'vi' and editorbin,
1684 not cmdpath and editor == b'vi' and editorbin,
1685 )
1685 )
1686 fm.condwrite(
1686 fm.condwrite(
1687 not cmdpath and editor != b'vi',
1687 not cmdpath and editor != b'vi',
1688 b'editornotfound',
1688 b'editornotfound',
1689 _(
1689 _(
1690 b" Can't find editor '%s' in PATH\n"
1690 b" Can't find editor '%s' in PATH\n"
1691 b" (specify a commit editor in your configuration"
1691 b" (specify a commit editor in your configuration"
1692 b" file)\n"
1692 b" file)\n"
1693 ),
1693 ),
1694 not cmdpath and editorbin,
1694 not cmdpath and editorbin,
1695 )
1695 )
1696 if not cmdpath and editor != b'vi':
1696 if not cmdpath and editor != b'vi':
1697 problems += 1
1697 problems += 1
1698
1698
1699 # check username
1699 # check username
1700 username = None
1700 username = None
1701 err = None
1701 err = None
1702 try:
1702 try:
1703 username = ui.username()
1703 username = ui.username()
1704 except error.Abort as e:
1704 except error.Abort as e:
1705 err = stringutil.forcebytestr(e)
1705 err = stringutil.forcebytestr(e)
1706 problems += 1
1706 problems += 1
1707
1707
1708 fm.condwrite(
1708 fm.condwrite(
1709 username, b'username', _(b"checking username (%s)\n"), username
1709 username, b'username', _(b"checking username (%s)\n"), username
1710 )
1710 )
1711 fm.condwrite(
1711 fm.condwrite(
1712 err,
1712 err,
1713 b'usernameerror',
1713 b'usernameerror',
1714 _(
1714 _(
1715 b"checking username...\n %s\n"
1715 b"checking username...\n %s\n"
1716 b" (specify a username in your configuration file)\n"
1716 b" (specify a username in your configuration file)\n"
1717 ),
1717 ),
1718 err,
1718 err,
1719 )
1719 )
1720
1720
1721 for name, mod in extensions.extensions():
1721 for name, mod in extensions.extensions():
1722 handler = getattr(mod, 'debuginstall', None)
1722 handler = getattr(mod, 'debuginstall', None)
1723 if handler is not None:
1723 if handler is not None:
1724 problems += handler(ui, fm)
1724 problems += handler(ui, fm)
1725
1725
1726 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
1726 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
1727 if not problems:
1727 if not problems:
1728 fm.data(problems=problems)
1728 fm.data(problems=problems)
1729 fm.condwrite(
1729 fm.condwrite(
1730 problems,
1730 problems,
1731 b'problems',
1731 b'problems',
1732 _(b"%d problems detected, please check your install!\n"),
1732 _(b"%d problems detected, please check your install!\n"),
1733 problems,
1733 problems,
1734 )
1734 )
1735 fm.end()
1735 fm.end()
1736
1736
1737 return problems
1737 return problems
1738
1738
1739
1739
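# Hypothetical invocation: run the install self-check and, via the formatter
# options, capture the individual check results in machine-readable form
# (assumption: -T json is available through formatteropts as usual):
#
#   $ hg debuginstall -T json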
1740 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
1740 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
1741 def debugknown(ui, repopath, *ids, **opts):
1741 def debugknown(ui, repopath, *ids, **opts):
1742 """test whether node ids are known to a repo
1742 """test whether node ids are known to a repo
1743
1743
1744 Every ID must be a full-length hex node id string. Returns a list of 0s
1744 Every ID must be a full-length hex node id string. Returns a list of 0s
1745 and 1s indicating unknown/known.
1745 and 1s indicating unknown/known.
1746 """
1746 """
1747 opts = pycompat.byteskwargs(opts)
1747 opts = pycompat.byteskwargs(opts)
1748 repo = hg.peer(ui, opts, repopath)
1748 repo = hg.peer(ui, opts, repopath)
1749 if not repo.capable(b'known'):
1749 if not repo.capable(b'known'):
1750 raise error.Abort(b"known() not supported by target repository")
1750 raise error.Abort(b"known() not supported by target repository")
1751 flags = repo.known([bin(s) for s in ids])
1751 flags = repo.known([bin(s) for s in ids])
1752 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
1752 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
1753
1753
1754
1754
1755 @command(b'debuglabelcomplete', [], _(b'LABEL...'))
1755 @command(b'debuglabelcomplete', [], _(b'LABEL...'))
1756 def debuglabelcomplete(ui, repo, *args):
1756 def debuglabelcomplete(ui, repo, *args):
1757 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1757 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1758 debugnamecomplete(ui, repo, *args)
1758 debugnamecomplete(ui, repo, *args)
1759
1759
1760
1760
1761 @command(
1761 @command(
1762 b'debuglocks',
1762 b'debuglocks',
1763 [
1763 [
1764 (b'L', b'force-lock', None, _(b'free the store lock (DANGEROUS)')),
1764 (b'L', b'force-lock', None, _(b'free the store lock (DANGEROUS)')),
1765 (
1765 (
1766 b'W',
1766 b'W',
1767 b'force-wlock',
1767 b'force-wlock',
1768 None,
1768 None,
1769 _(b'free the working state lock (DANGEROUS)'),
1769 _(b'free the working state lock (DANGEROUS)'),
1770 ),
1770 ),
1771 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
1771 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
1772 (
1772 (
1773 b'S',
1773 b'S',
1774 b'set-wlock',
1774 b'set-wlock',
1775 None,
1775 None,
1776 _(b'set the working state lock until stopped'),
1776 _(b'set the working state lock until stopped'),
1777 ),
1777 ),
1778 ],
1778 ],
1779 _(b'[OPTION]...'),
1779 _(b'[OPTION]...'),
1780 )
1780 )
1781 def debuglocks(ui, repo, **opts):
1781 def debuglocks(ui, repo, **opts):
1782 """show or modify state of locks
1782 """show or modify state of locks
1783
1783
1784 By default, this command will show which locks are held. This
1784 By default, this command will show which locks are held. This
1785 includes the user and process holding the lock, the amount of time
1785 includes the user and process holding the lock, the amount of time
1786 the lock has been held, and the machine name where the process is
1786 the lock has been held, and the machine name where the process is
1787 running if it's not local.
1787 running if it's not local.
1788
1788
1789 Locks protect the integrity of Mercurial's data, so they should be
1789 Locks protect the integrity of Mercurial's data, so they should be
1790 treated with care. System crashes or other interruptions may cause
1790 treated with care. System crashes or other interruptions may cause
1791 locks to not be properly released, though Mercurial will usually
1791 locks to not be properly released, though Mercurial will usually
1792 detect and remove such stale locks automatically.
1792 detect and remove such stale locks automatically.
1793
1793
1794 However, detecting stale locks may not always be possible (for
1794 However, detecting stale locks may not always be possible (for
1795 instance, on a shared filesystem). Removing locks may also be
1795 instance, on a shared filesystem). Removing locks may also be
1796 blocked by filesystem permissions.
1796 blocked by filesystem permissions.
1797
1797
1798 Setting a lock will prevent other commands from changing the data.
1798 Setting a lock will prevent other commands from changing the data.
1799 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1799 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1800 The set locks are removed when the command exits.
1800 The set locks are removed when the command exits.
1801
1801
1802 Returns 0 if no locks are held.
1802 Returns 0 if no locks are held.
1803
1803
1804 """
1804 """
1805
1805
1806 if opts.get('force_lock'):
1806 if opts.get('force_lock'):
1807 repo.svfs.unlink(b'lock')
1807 repo.svfs.unlink(b'lock')
1808 if opts.get('force_wlock'):
1808 if opts.get('force_wlock'):
1809 repo.vfs.unlink(b'wlock')
1809 repo.vfs.unlink(b'wlock')
1810 if opts.get('force_lock') or opts.get('force_wlock'):
1810 if opts.get('force_lock') or opts.get('force_wlock'):
1811 return 0
1811 return 0
1812
1812
1813 locks = []
1813 locks = []
1814 try:
1814 try:
1815 if opts.get('set_wlock'):
1815 if opts.get('set_wlock'):
1816 try:
1816 try:
1817 locks.append(repo.wlock(False))
1817 locks.append(repo.wlock(False))
1818 except error.LockHeld:
1818 except error.LockHeld:
1819 raise error.Abort(_(b'wlock is already held'))
1819 raise error.Abort(_(b'wlock is already held'))
1820 if opts.get('set_lock'):
1820 if opts.get('set_lock'):
1821 try:
1821 try:
1822 locks.append(repo.lock(False))
1822 locks.append(repo.lock(False))
1823 except error.LockHeld:
1823 except error.LockHeld:
1824 raise error.Abort(_(b'lock is already held'))
1824 raise error.Abort(_(b'lock is already held'))
1825 if len(locks):
1825 if len(locks):
1826 ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
1826 ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
1827 return 0
1827 return 0
1828 finally:
1828 finally:
1829 release(*locks)
1829 release(*locks)
1830
1830
1831 now = time.time()
1831 now = time.time()
1832 held = 0
1832 held = 0
1833
1833
1834 def report(vfs, name, method):
1834 def report(vfs, name, method):
1835 # this causes stale locks to get reaped for more accurate reporting
1835 # this causes stale locks to get reaped for more accurate reporting
1836 try:
1836 try:
1837 l = method(False)
1837 l = method(False)
1838 except error.LockHeld:
1838 except error.LockHeld:
1839 l = None
1839 l = None
1840
1840
1841 if l:
1841 if l:
1842 l.release()
1842 l.release()
1843 else:
1843 else:
1844 try:
1844 try:
1845 st = vfs.lstat(name)
1845 st = vfs.lstat(name)
1846 age = now - st[stat.ST_MTIME]
1846 age = now - st[stat.ST_MTIME]
1847 user = util.username(st.st_uid)
1847 user = util.username(st.st_uid)
1848 locker = vfs.readlock(name)
1848 locker = vfs.readlock(name)
1849 if b":" in locker:
1849 if b":" in locker:
1850 host, pid = locker.split(b':')
1850 host, pid = locker.split(b':')
1851 if host == socket.gethostname():
1851 if host == socket.gethostname():
1852 locker = b'user %s, process %s' % (user or b'None', pid)
1852 locker = b'user %s, process %s' % (user or b'None', pid)
1853 else:
1853 else:
1854 locker = b'user %s, process %s, host %s' % (
1854 locker = b'user %s, process %s, host %s' % (
1855 user or b'None',
1855 user or b'None',
1856 pid,
1856 pid,
1857 host,
1857 host,
1858 )
1858 )
1859 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
1859 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
1860 return 1
1860 return 1
1861 except OSError as e:
1861 except OSError as e:
1862 if e.errno != errno.ENOENT:
1862 if e.errno != errno.ENOENT:
1863 raise
1863 raise
1864
1864
1865 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
1865 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
1866 return 0
1866 return 0
1867
1867
1868 held += report(repo.svfs, b"lock", repo.lock)
1868 held += report(repo.svfs, b"lock", repo.lock)
1869 held += report(repo.vfs, b"wlock", repo.wlock)
1869 held += report(repo.vfs, b"wlock", repo.wlock)
1870
1870
1871 return held
1871 return held
1872
1872
1873
1873
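# Hypothetical invocations: report the current lock state, or hold the store
# lock until interrupted, as described in the docstring above:
#
#   $ hg debuglocks
#   $ hg debuglocks --set-lock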
1874 @command(
1874 @command(
1875 b'debugmanifestfulltextcache',
1875 b'debugmanifestfulltextcache',
1876 [
1876 [
1877 (b'', b'clear', False, _(b'clear the cache')),
1877 (b'', b'clear', False, _(b'clear the cache')),
1878 (
1878 (
1879 b'a',
1879 b'a',
1880 b'add',
1880 b'add',
1881 [],
1881 [],
1882 _(b'add the given manifest nodes to the cache'),
1882 _(b'add the given manifest nodes to the cache'),
1883 _(b'NODE'),
1883 _(b'NODE'),
1884 ),
1884 ),
1885 ],
1885 ],
1886 b'',
1886 b'',
1887 )
1887 )
1888 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
1888 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
1889 """show, clear or amend the contents of the manifest fulltext cache"""
1889 """show, clear or amend the contents of the manifest fulltext cache"""
1890
1890
1891 def getcache():
1891 def getcache():
1892 r = repo.manifestlog.getstorage(b'')
1892 r = repo.manifestlog.getstorage(b'')
1893 try:
1893 try:
1894 return r._fulltextcache
1894 return r._fulltextcache
1895 except AttributeError:
1895 except AttributeError:
1896 msg = _(
1896 msg = _(
1897 b"Current revlog implementation doesn't appear to have a "
1897 b"Current revlog implementation doesn't appear to have a "
1898 b"manifest fulltext cache\n"
1898 b"manifest fulltext cache\n"
1899 )
1899 )
1900 raise error.Abort(msg)
1900 raise error.Abort(msg)
1901
1901
1902 if opts.get('clear'):
1902 if opts.get('clear'):
1903 with repo.wlock():
1903 with repo.wlock():
1904 cache = getcache()
1904 cache = getcache()
1905 cache.clear(clear_persisted_data=True)
1905 cache.clear(clear_persisted_data=True)
1906 return
1906 return
1907
1907
1908 if add:
1908 if add:
1909 with repo.wlock():
1909 with repo.wlock():
1910 m = repo.manifestlog
1910 m = repo.manifestlog
1911 store = m.getstorage(b'')
1911 store = m.getstorage(b'')
1912 for n in add:
1912 for n in add:
1913 try:
1913 try:
1914 manifest = m[store.lookup(n)]
1914 manifest = m[store.lookup(n)]
1915 except error.LookupError as e:
1915 except error.LookupError as e:
1916 raise error.Abort(e, hint=b"Check your manifest node id")
1916 raise error.Abort(e, hint=b"Check your manifest node id")
1917 manifest.read() # stores revision in cache too
1917 manifest.read() # stores revision in cache too
1918 return
1918 return
1919
1919
1920 cache = getcache()
1920 cache = getcache()
1921 if not len(cache):
1921 if not len(cache):
1922 ui.write(_(b'cache empty\n'))
1922 ui.write(_(b'cache empty\n'))
1923 else:
1923 else:
1924 ui.write(
1924 ui.write(
1925 _(
1925 _(
1926 b'cache contains %d manifest entries, in order of most to '
1926 b'cache contains %d manifest entries, in order of most to '
1927 b'least recent:\n'
1927 b'least recent:\n'
1928 )
1928 )
1929 % (len(cache),)
1929 % (len(cache),)
1930 )
1930 )
1931 totalsize = 0
1931 totalsize = 0
1932 for nodeid in cache:
1932 for nodeid in cache:
1933 # Use cache.peek to not update the LRU order
1933 # Use cache.peek to not update the LRU order
1934 data = cache.peek(nodeid)
1934 data = cache.peek(nodeid)
1935 size = len(data)
1935 size = len(data)
1936 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1936 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1937 ui.write(
1937 ui.write(
1938 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
1938 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
1939 )
1939 )
1940 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
1940 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
1941 ui.write(
1941 ui.write(
1942 _(b'total cache data size %s, on-disk %s\n')
1942 _(b'total cache data size %s, on-disk %s\n')
1943 % (util.bytecount(totalsize), util.bytecount(ondisk))
1943 % (util.bytecount(totalsize), util.bytecount(ondisk))
1944 )
1944 )
1945
1945
1946
1946
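# Hypothetical invocations: list the cached manifest fulltexts with their
# sizes, or drop the on-disk cache entirely:
#
#   $ hg debugmanifestfulltextcache
#   $ hg debugmanifestfulltextcache --clear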
1947 @command(b'debugmergestate', [], b'')
1947 @command(b'debugmergestate', [], b'')
1948 def debugmergestate(ui, repo, *args):
1948 def debugmergestate(ui, repo, *args):
1949 """print merge state
1949 """print merge state
1950
1950
1951 Use --verbose to print out information about whether v1 or v2 merge state
1951 Use --verbose to print out information about whether v1 or v2 merge state
1952 was chosen."""
1952 was chosen."""
1953
1953
1954 def _hashornull(h):
1954 def _hashornull(h):
1955 if h == nullhex:
1955 if h == nullhex:
1956 return b'null'
1956 return b'null'
1957 else:
1957 else:
1958 return h
1958 return h
1959
1959
1960 def printrecords(version):
1960 def printrecords(version):
1961 ui.writenoi18n(b'* version %d records\n' % version)
1961 ui.writenoi18n(b'* version %d records\n' % version)
1962 if version == 1:
1962 if version == 1:
1963 records = v1records
1963 records = v1records
1964 else:
1964 else:
1965 records = v2records
1965 records = v2records
1966
1966
1967 for rtype, record in records:
1967 for rtype, record in records:
1968 # pretty print some record types
1968 # pretty print some record types
1969 if rtype == b'L':
1969 if rtype == b'L':
1970 ui.writenoi18n(b'local: %s\n' % record)
1970 ui.writenoi18n(b'local: %s\n' % record)
1971 elif rtype == b'O':
1971 elif rtype == b'O':
1972 ui.writenoi18n(b'other: %s\n' % record)
1972 ui.writenoi18n(b'other: %s\n' % record)
1973 elif rtype == b'm':
1973 elif rtype == b'm':
1974 driver, mdstate = record.split(b'\0', 1)
1974 driver, mdstate = record.split(b'\0', 1)
1975 ui.writenoi18n(
1975 ui.writenoi18n(
1976 b'merge driver: %s (state "%s")\n' % (driver, mdstate)
1976 b'merge driver: %s (state "%s")\n' % (driver, mdstate)
1977 )
1977 )
1978 elif rtype in b'FDC':
1978 elif rtype in b'FDC':
1979 r = record.split(b'\0')
1979 r = record.split(b'\0')
1980 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1980 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1981 if version == 1:
1981 if version == 1:
1982 onode = b'not stored in v1 format'
1982 onode = b'not stored in v1 format'
1983 flags = r[7]
1983 flags = r[7]
1984 else:
1984 else:
1985 onode, flags = r[7:9]
1985 onode, flags = r[7:9]
1986 ui.writenoi18n(
1986 ui.writenoi18n(
1987 b'file: %s (record type "%s", state "%s", hash %s)\n'
1987 b'file: %s (record type "%s", state "%s", hash %s)\n'
1988 % (f, rtype, state, _hashornull(hash))
1988 % (f, rtype, state, _hashornull(hash))
1989 )
1989 )
1990 ui.writenoi18n(
1990 ui.writenoi18n(
1991 b' local path: %s (flags "%s")\n' % (lfile, flags)
1991 b' local path: %s (flags "%s")\n' % (lfile, flags)
1992 )
1992 )
1993 ui.writenoi18n(
1993 ui.writenoi18n(
1994 b' ancestor path: %s (node %s)\n'
1994 b' ancestor path: %s (node %s)\n'
1995 % (afile, _hashornull(anode))
1995 % (afile, _hashornull(anode))
1996 )
1996 )
1997 ui.writenoi18n(
1997 ui.writenoi18n(
1998 b' other path: %s (node %s)\n'
1998 b' other path: %s (node %s)\n'
1999 % (ofile, _hashornull(onode))
1999 % (ofile, _hashornull(onode))
2000 )
2000 )
2001 elif rtype == b'f':
2001 elif rtype == b'f':
2002 filename, rawextras = record.split(b'\0', 1)
2002 filename, rawextras = record.split(b'\0', 1)
2003 extras = rawextras.split(b'\0')
2003 extras = rawextras.split(b'\0')
2004 i = 0
2004 i = 0
2005 extrastrings = []
2005 extrastrings = []
2006 while i < len(extras):
2006 while i < len(extras):
2007 extrastrings.append(b'%s = %s' % (extras[i], extras[i + 1]))
2007 extrastrings.append(b'%s = %s' % (extras[i], extras[i + 1]))
2008 i += 2
2008 i += 2
2009
2009
2010 ui.writenoi18n(
2010 ui.writenoi18n(
2011 b'file extras: %s (%s)\n'
2011 b'file extras: %s (%s)\n'
2012 % (filename, b', '.join(extrastrings))
2012 % (filename, b', '.join(extrastrings))
2013 )
2013 )
2014 elif rtype == b'l':
2014 elif rtype == b'l':
2015 labels = record.split(b'\0', 2)
2015 labels = record.split(b'\0', 2)
2016 labels = [l for l in labels if len(l) > 0]
2016 labels = [l for l in labels if len(l) > 0]
2017 ui.writenoi18n(b'labels:\n')
2017 ui.writenoi18n(b'labels:\n')
2018 ui.write((b' local: %s\n' % labels[0]))
2018 ui.write((b' local: %s\n' % labels[0]))
2019 ui.write((b' other: %s\n' % labels[1]))
2019 ui.write((b' other: %s\n' % labels[1]))
2020 if len(labels) > 2:
2020 if len(labels) > 2:
2021 ui.write((b' base: %s\n' % labels[2]))
2021 ui.write((b' base: %s\n' % labels[2]))
2022 else:
2022 else:
2023 ui.writenoi18n(
2023 ui.writenoi18n(
2024 b'unrecognized entry: %s\t%s\n'
2024 b'unrecognized entry: %s\t%s\n'
2025 % (rtype, record.replace(b'\0', b'\t'))
2025 % (rtype, record.replace(b'\0', b'\t'))
2026 )
2026 )
2027
2027
2028 # Avoid mergestate.read() since it may raise an exception for unsupported
2028 # Avoid mergestate.read() since it may raise an exception for unsupported
2029 # merge state records. We shouldn't be doing this, but this is OK since this
2029 # merge state records. We shouldn't be doing this, but this is OK since this
2030 # command is pretty low-level.
2030 # command is pretty low-level.
2031 ms = mergemod.mergestate(repo)
2031 ms = mergemod.mergestate(repo)
2032
2032
2033 # sort so that reasonable information is on top
2033 # sort so that reasonable information is on top
2034 v1records = ms._readrecordsv1()
2034 v1records = ms._readrecordsv1()
2035 v2records = ms._readrecordsv2()
2035 v2records = ms._readrecordsv2()
2036 order = b'LOml'
2036 order = b'LOml'
2037
2037
2038 def key(r):
2038 def key(r):
2039 idx = order.find(r[0])
2039 idx = order.find(r[0])
2040 if idx == -1:
2040 if idx == -1:
2041 return (1, r[1])
2041 return (1, r[1])
2042 else:
2042 else:
2043 return (0, idx)
2043 return (0, idx)
2044
2044
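# Illustrative note (not part of the original code): the sort key above puts
# known record types in the fixed order b'LOml' and pushes anything else to
# the end, ordered by payload. For example, assuming records
# [(b'f', b'foo\0...'), (b'O', b'<node>'), (b'L', b'<node>')], sorting with
# key() would yield the b'L' record first, then b'O', then b'f'.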
2045 v1records.sort(key=key)
2045 v1records.sort(key=key)
2046 v2records.sort(key=key)
2046 v2records.sort(key=key)
2047
2047
2048 if not v1records and not v2records:
2048 if not v1records and not v2records:
2049 ui.writenoi18n(b'no merge state found\n')
2049 ui.writenoi18n(b'no merge state found\n')
2050 elif not v2records:
2050 elif not v2records:
2051 ui.notenoi18n(b'no version 2 merge state\n')
2051 ui.notenoi18n(b'no version 2 merge state\n')
2052 printrecords(1)
2052 printrecords(1)
2053 elif ms._v1v2match(v1records, v2records):
2053 elif ms._v1v2match(v1records, v2records):
2054 ui.notenoi18n(b'v1 and v2 states match: using v2\n')
2054 ui.notenoi18n(b'v1 and v2 states match: using v2\n')
2055 printrecords(2)
2055 printrecords(2)
2056 else:
2056 else:
2057 ui.notenoi18n(b'v1 and v2 states mismatch: using v1\n')
2057 ui.notenoi18n(b'v1 and v2 states mismatch: using v1\n')
2058 printrecords(1)
2058 printrecords(1)
2059 if ui.verbose:
2059 if ui.verbose:
2060 printrecords(2)
2060 printrecords(2)
2061
2061
2062
2062
2063 @command(b'debugnamecomplete', [], _(b'NAME...'))
2063 @command(b'debugnamecomplete', [], _(b'NAME...'))
2064 def debugnamecomplete(ui, repo, *args):
2064 def debugnamecomplete(ui, repo, *args):
2065 '''complete "names" - tags, open branch names, bookmark names'''
2065 '''complete "names" - tags, open branch names, bookmark names'''
2066
2066
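# Hedged usage sketch (not in the original source): something like
# `hg debugnamecomplete fo` would print, one per line, every tag, bookmark,
# other registered name, and open branch name starting with "fo"; with no
# argument, every known name is listed.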
2067 names = set()
2067 names = set()
2068 # since we previously only listed open branches, we will handle that
2068 # since we previously only listed open branches, we will handle that
2069 # specially (after this for loop)
2069 # specially (after this for loop)
2070 for name, ns in pycompat.iteritems(repo.names):
2070 for name, ns in pycompat.iteritems(repo.names):
2071 if name != b'branches':
2071 if name != b'branches':
2072 names.update(ns.listnames(repo))
2072 names.update(ns.listnames(repo))
2073 names.update(
2073 names.update(
2074 tag
2074 tag
2075 for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
2075 for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
2076 if not closed
2076 if not closed
2077 )
2077 )
2078 completions = set()
2078 completions = set()
2079 if not args:
2079 if not args:
2080 args = [b'']
2080 args = [b'']
2081 for a in args:
2081 for a in args:
2082 completions.update(n for n in names if n.startswith(a))
2082 completions.update(n for n in names if n.startswith(a))
2083 ui.write(b'\n'.join(sorted(completions)))
2083 ui.write(b'\n'.join(sorted(completions)))
2084 ui.write(b'\n')
2084 ui.write(b'\n')
2085
2085
2086
2086
2087 @command(
2087 @command(
2088 b'debugnodemap',
2088 b'debugnodemap',
2089 [
2089 [
2090 (
2090 (
2091 b'',
2091 b'',
2092 b'dump-new',
2092 b'dump-new',
2093 False,
2093 False,
2094 _(b'write a (new) persistent binary nodemap to stdout'),
2094 _(b'write a (new) persistent binary nodemap to stdout'),
2095 ),
2095 ),
2096 (b'', b'dump-disk', False, _(b'dump on-disk data to stdout')),
2096 (b'', b'dump-disk', False, _(b'dump on-disk data to stdout')),
2097 (
2097 (
2098 b'',
2098 b'',
2099 b'check',
2099 b'check',
2100 False,
2100 False,
2101 _(b'check that the data on disk are correct.'),
2101 _(b'check that the data on disk are correct.'),
2102 ),
2102 ),
2103 (
2103 (
2104 b'',
2104 b'',
2105 b'metadata',
2105 b'metadata',
2106 False,
2106 False,
2107 _(b'display the on-disk metadata for the nodemap'),
2107 _(b'display the on-disk metadata for the nodemap'),
2108 ),
2108 ),
2109 ],
2109 ],
2110 )
2110 )
2111 def debugnodemap(ui, repo, **opts):
2111 def debugnodemap(ui, repo, **opts):
2112 """write and inspect on disk nodemap
2112 """write and inspect on disk nodemap
2113 """
2113 """
2114 if opts['dump_new']:
2114 if opts['dump_new']:
2115 unfi = repo.unfiltered()
2115 unfi = repo.unfiltered()
2116 cl = unfi.changelog
2116 cl = unfi.changelog
2117 data = nodemap.persistent_data(cl.index)
2117 data = nodemap.persistent_data(cl.index)
2118 ui.write(data)
2118 ui.write(data)
2119 elif opts['dump_disk']:
2119 elif opts['dump_disk']:
2120 unfi = repo.unfiltered()
2120 unfi = repo.unfiltered()
2121 cl = unfi.changelog
2121 cl = unfi.changelog
2122 nm_data = nodemap.persisted_data(cl)
2122 nm_data = nodemap.persisted_data(cl)
2123 if nm_data is not None:
2123 if nm_data is not None:
2124 docket, data = nm_data
2124 docket, data = nm_data
2125 ui.write(data)
2125 ui.write(data)
2126 elif opts['check']:
2126 elif opts['check']:
2127 unfi = repo.unfiltered()
2127 unfi = repo.unfiltered()
2128 cl = unfi.changelog
2128 cl = unfi.changelog
2129 nm_data = nodemap.persisted_data(cl)
2129 nm_data = nodemap.persisted_data(cl)
2130 if nm_data is not None:
2130 if nm_data is not None:
2131 docket, data = nm_data
2131 docket, data = nm_data
2132 return nodemap.check_data(ui, cl.index, data)
2132 return nodemap.check_data(ui, cl.index, data)
2133 elif opts['metadata']:
2133 elif opts['metadata']:
2134 unfi = repo.unfiltered()
2134 unfi = repo.unfiltered()
2135 cl = unfi.changelog
2135 cl = unfi.changelog
2136 nm_data = nodemap.persisted_data(cl)
2136 nm_data = nodemap.persisted_data(cl)
2137 if nm_data is not None:
2137 if nm_data is not None:
2138 docket, data = nm_data
2138 docket, data = nm_data
2139 ui.write((b"uid: %s\n") % docket.uid)
2139 ui.write((b"uid: %s\n") % docket.uid)
2140 ui.write((b"tip-rev: %d\n") % docket.tip_rev)
2140 ui.write((b"tip-rev: %d\n") % docket.tip_rev)
2141 ui.write((b"data-length: %d\n") % docket.data_length)
2141 ui.write((b"data-length: %d\n") % docket.data_length)
2142 ui.write((b"data-unused: %d\n") % docket.data_unused)
2142 ui.write((b"data-unused: %d\n") % docket.data_unused)
2143
2143
2144
2144
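# Hedged usage sketch for the command above (not in the original source):
# when a persistent nodemap exists on disk, `hg debugnodemap --metadata`
# prints the docket fields written above as lines of the form "uid: ...",
# "tip-rev: ...", "data-length: ..." and "data-unused: ...", while
# `hg debugnodemap --dump-new` emits the raw binary nodemap so it can be
# redirected to a file for inspection.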
2145 @command(
2145 @command(
2146 b'debugobsolete',
2146 b'debugobsolete',
2147 [
2147 [
2148 (b'', b'flags', 0, _(b'markers flag')),
2148 (b'', b'flags', 0, _(b'markers flag')),
2149 (
2149 (
2150 b'',
2150 b'',
2151 b'record-parents',
2151 b'record-parents',
2152 False,
2152 False,
2153 _(b'record parent information for the precursor'),
2153 _(b'record parent information for the precursor'),
2154 ),
2154 ),
2155 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2155 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2156 (
2156 (
2157 b'',
2157 b'',
2158 b'exclusive',
2158 b'exclusive',
2159 False,
2159 False,
2160 _(b'restrict display to markers only relevant to REV'),
2160 _(b'restrict display to markers only relevant to REV'),
2161 ),
2161 ),
2162 (b'', b'index', False, _(b'display index of the marker')),
2162 (b'', b'index', False, _(b'display index of the marker')),
2163 (b'', b'delete', [], _(b'delete markers specified by indices')),
2163 (b'', b'delete', [], _(b'delete markers specified by indices')),
2164 ]
2164 ]
2165 + cmdutil.commitopts2
2165 + cmdutil.commitopts2
2166 + cmdutil.formatteropts,
2166 + cmdutil.formatteropts,
2167 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2167 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2168 )
2168 )
2169 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2169 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2170 """create arbitrary obsolete marker
2170 """create arbitrary obsolete marker
2171
2171
2172 With no arguments, displays the list of obsolescence markers."""
2172 With no arguments, displays the list of obsolescence markers."""
2173
2173
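# Hedged usage sketch (not in the original source): `hg debugobsolete` with
# no arguments lists existing markers via the formatter below, while
# `hg debugobsolete <precursor-node> <successor-node>` creates a new marker,
# where both arguments must be full 40-character hexadecimal node ids (see
# parsenodeid below).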
2174 opts = pycompat.byteskwargs(opts)
2174 opts = pycompat.byteskwargs(opts)
2175
2175
2176 def parsenodeid(s):
2176 def parsenodeid(s):
2177 try:
2177 try:
2178 # We do not use revsingle/revrange functions here to accept
2178 # We do not use revsingle/revrange functions here to accept
2179 # arbitrary node identifiers, possibly not present in the
2179 # arbitrary node identifiers, possibly not present in the
2180 # local repository.
2180 # local repository.
2181 n = bin(s)
2181 n = bin(s)
2182 if len(n) != len(nullid):
2182 if len(n) != len(nullid):
2183 raise TypeError()
2183 raise TypeError()
2184 return n
2184 return n
2185 except TypeError:
2185 except TypeError:
2186 raise error.Abort(
2186 raise error.Abort(
2187 b'changeset references must be full hexadecimal '
2187 b'changeset references must be full hexadecimal '
2188 b'node identifiers'
2188 b'node identifiers'
2189 )
2189 )
2190
2190
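# Illustrative note (not part of the original code): parsenodeid() only
# accepts full-length node ids, so a 40-digit hex string such as 40 * b'f'
# is accepted and returned as 20 raw bytes, while a shorter prefix like
# b'1234abcd' triggers the "full hexadecimal node identifiers" abort above.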
2191 if opts.get(b'delete'):
2191 if opts.get(b'delete'):
2192 indices = []
2192 indices = []
2193 for v in opts.get(b'delete'):
2193 for v in opts.get(b'delete'):
2194 try:
2194 try:
2195 indices.append(int(v))
2195 indices.append(int(v))
2196 except ValueError:
2196 except ValueError:
2197 raise error.Abort(
2197 raise error.Abort(
2198 _(b'invalid index value: %r') % v,
2198 _(b'invalid index value: %r') % v,
2199 hint=_(b'use integers for indices'),
2199 hint=_(b'use integers for indices'),
2200 )
2200 )
2201
2201
2202 if repo.currenttransaction():
2202 if repo.currenttransaction():
2203 raise error.Abort(
2203 raise error.Abort(
2204 _(b'cannot delete obsmarkers in the middle of a transaction.')
2204 _(b'cannot delete obsmarkers in the middle of a transaction.')
2205 )
2205 )
2206
2206
2207 with repo.lock():
2207 with repo.lock():
2208 n = repair.deleteobsmarkers(repo.obsstore, indices)
2208 n = repair.deleteobsmarkers(repo.obsstore, indices)
2209 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2209 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2210
2210
2211 return
2211 return
2212
2212
2213 if precursor is not None:
2213 if precursor is not None:
2214 if opts[b'rev']:
2214 if opts[b'rev']:
2215 raise error.Abort(b'cannot select revision when creating marker')
2215 raise error.Abort(b'cannot select revision when creating marker')
2216 metadata = {}
2216 metadata = {}
2217 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2217 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2218 succs = tuple(parsenodeid(succ) for succ in successors)
2218 succs = tuple(parsenodeid(succ) for succ in successors)
2219 l = repo.lock()
2219 l = repo.lock()
2220 try:
2220 try:
2221 tr = repo.transaction(b'debugobsolete')
2221 tr = repo.transaction(b'debugobsolete')
2222 try:
2222 try:
2223 date = opts.get(b'date')
2223 date = opts.get(b'date')
2224 if date:
2224 if date:
2225 date = dateutil.parsedate(date)
2225 date = dateutil.parsedate(date)
2226 else:
2226 else:
2227 date = None
2227 date = None
2228 prec = parsenodeid(precursor)
2228 prec = parsenodeid(precursor)
2229 parents = None
2229 parents = None
2230 if opts[b'record_parents']:
2230 if opts[b'record_parents']:
2231 if prec not in repo.unfiltered():
2231 if prec not in repo.unfiltered():
2232 raise error.Abort(
2232 raise error.Abort(
2233 b'cannot use --record-parents on '
2233 b'cannot use --record-parents on '
2234 b'unknown changesets'
2234 b'unknown changesets'
2235 )
2235 )
2236 parents = repo.unfiltered()[prec].parents()
2236 parents = repo.unfiltered()[prec].parents()
2237 parents = tuple(p.node() for p in parents)
2237 parents = tuple(p.node() for p in parents)
2238 repo.obsstore.create(
2238 repo.obsstore.create(
2239 tr,
2239 tr,
2240 prec,
2240 prec,
2241 succs,
2241 succs,
2242 opts[b'flags'],
2242 opts[b'flags'],
2243 parents=parents,
2243 parents=parents,
2244 date=date,
2244 date=date,
2245 metadata=metadata,
2245 metadata=metadata,
2246 ui=ui,
2246 ui=ui,
2247 )
2247 )
2248 tr.close()
2248 tr.close()
2249 except ValueError as exc:
2249 except ValueError as exc:
2250 raise error.Abort(
2250 raise error.Abort(
2251 _(b'bad obsmarker input: %s') % pycompat.bytestr(exc)
2251 _(b'bad obsmarker input: %s') % pycompat.bytestr(exc)
2252 )
2252 )
2253 finally:
2253 finally:
2254 tr.release()
2254 tr.release()
2255 finally:
2255 finally:
2256 l.release()
2256 l.release()
2257 else:
2257 else:
2258 if opts[b'rev']:
2258 if opts[b'rev']:
2259 revs = scmutil.revrange(repo, opts[b'rev'])
2259 revs = scmutil.revrange(repo, opts[b'rev'])
2260 nodes = [repo[r].node() for r in revs]
2260 nodes = [repo[r].node() for r in revs]
2261 markers = list(
2261 markers = list(
2262 obsutil.getmarkers(
2262 obsutil.getmarkers(
2263 repo, nodes=nodes, exclusive=opts[b'exclusive']
2263 repo, nodes=nodes, exclusive=opts[b'exclusive']
2264 )
2264 )
2265 )
2265 )
2266 markers.sort(key=lambda x: x._data)
2266 markers.sort(key=lambda x: x._data)
2267 else:
2267 else:
2268 markers = obsutil.getmarkers(repo)
2268 markers = obsutil.getmarkers(repo)
2269
2269
2270 markerstoiter = markers
2270 markerstoiter = markers
2271 isrelevant = lambda m: True
2271 isrelevant = lambda m: True
2272 if opts.get(b'rev') and opts.get(b'index'):
2272 if opts.get(b'rev') and opts.get(b'index'):
2273 markerstoiter = obsutil.getmarkers(repo)
2273 markerstoiter = obsutil.getmarkers(repo)
2274 markerset = set(markers)
2274 markerset = set(markers)
2275 isrelevant = lambda m: m in markerset
2275 isrelevant = lambda m: m in markerset
2276
2276
2277 fm = ui.formatter(b'debugobsolete', opts)
2277 fm = ui.formatter(b'debugobsolete', opts)
2278 for i, m in enumerate(markerstoiter):
2278 for i, m in enumerate(markerstoiter):
2279 if not isrelevant(m):
2279 if not isrelevant(m):
2280 # marker can be irrelevant when we're iterating over a set
2280 # marker can be irrelevant when we're iterating over a set
2281 # of markers (markerstoiter) which is bigger than the set
2281 # of markers (markerstoiter) which is bigger than the set
2282 # of markers we want to display (markers)
2282 # of markers we want to display (markers)
2283 # this can happen if both --index and --rev options are
2283 # this can happen if both --index and --rev options are
2284 # provided and thus we need to iterate over all of the markers
2284 # provided and thus we need to iterate over all of the markers
2285 # to get the correct indices, but only display the ones that
2285 # to get the correct indices, but only display the ones that
2286 # are relevant to --rev value
2286 # are relevant to --rev value
2287 continue
2287 continue
2288 fm.startitem()
2288 fm.startitem()
2289 ind = i if opts.get(b'index') else None
2289 ind = i if opts.get(b'index') else None
2290 cmdutil.showmarker(fm, m, index=ind)
2290 cmdutil.showmarker(fm, m, index=ind)
2291 fm.end()
2291 fm.end()
2292
2292
2293
2293
2294 @command(
2294 @command(
2295 b'debugp1copies',
2295 b'debugp1copies',
2296 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2296 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2297 _(b'[-r REV]'),
2297 _(b'[-r REV]'),
2298 )
2298 )
2299 def debugp1copies(ui, repo, **opts):
2299 def debugp1copies(ui, repo, **opts):
2300 """dump copy information compared to p1"""
2300 """dump copy information compared to p1"""
2301
2301
2302 opts = pycompat.byteskwargs(opts)
2302 opts = pycompat.byteskwargs(opts)
2303 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2303 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2304 for dst, src in ctx.p1copies().items():
2304 for dst, src in ctx.p1copies().items():
2305 ui.write(b'%s -> %s\n' % (src, dst))
2305 ui.write(b'%s -> %s\n' % (src, dst))
2306
2306
2307
2307
2308 @command(
2308 @command(
2309 b'debugp2copies',
2309 b'debugp2copies',
2310 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2310 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2311 _(b'[-r REV]'),
2311 _(b'[-r REV]'),
2312 )
2312 )
2313 def debugp2copies(ui, repo, **opts):
2313 def debugp2copies(ui, repo, **opts):
2314 """dump copy information compared to p2"""
2314 """dump copy information compared to p2"""
2315
2315
2316 opts = pycompat.byteskwargs(opts)
2316 opts = pycompat.byteskwargs(opts)
2317 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2317 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2318 for dst, src in ctx.p2copies().items():
2318 for dst, src in ctx.p2copies().items():
2319 ui.write(b'%s -> %s\n' % (src, dst))
2319 ui.write(b'%s -> %s\n' % (src, dst))
2320
2320
2321
2321
2322 @command(
2322 @command(
2323 b'debugpathcomplete',
2323 b'debugpathcomplete',
2324 [
2324 [
2325 (b'f', b'full', None, _(b'complete an entire path')),
2325 (b'f', b'full', None, _(b'complete an entire path')),
2326 (b'n', b'normal', None, _(b'show only normal files')),
2326 (b'n', b'normal', None, _(b'show only normal files')),
2327 (b'a', b'added', None, _(b'show only added files')),
2327 (b'a', b'added', None, _(b'show only added files')),
2328 (b'r', b'removed', None, _(b'show only removed files')),
2328 (b'r', b'removed', None, _(b'show only removed files')),
2329 ],
2329 ],
2330 _(b'FILESPEC...'),
2330 _(b'FILESPEC...'),
2331 )
2331 )
2332 def debugpathcomplete(ui, repo, *specs, **opts):
2332 def debugpathcomplete(ui, repo, *specs, **opts):
2333 '''complete part or all of a tracked path
2333 '''complete part or all of a tracked path
2334
2334
2335 This command supports shells that offer path name completion. It
2335 This command supports shells that offer path name completion. It
2336 currently completes only files already known to the dirstate.
2336 currently completes only files already known to the dirstate.
2337
2337
2338 Completion extends only to the next path segment unless
2338 Completion extends only to the next path segment unless
2339 --full is specified, in which case entire paths are used.'''
2339 --full is specified, in which case entire paths are used.'''
2340
2340
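# Hedged example (not in the original source): assuming a repository that
# tracks src/a.py and src/lib/b.py, completing the spec "src/" would, per
# complete() below, report the file src/a.py plus the directory src/lib,
# whereas passing --full would list both files with their entire paths.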
2341 def complete(path, acceptable):
2341 def complete(path, acceptable):
2342 dirstate = repo.dirstate
2342 dirstate = repo.dirstate
2343 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2343 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2344 rootdir = repo.root + pycompat.ossep
2344 rootdir = repo.root + pycompat.ossep
2345 if spec != repo.root and not spec.startswith(rootdir):
2345 if spec != repo.root and not spec.startswith(rootdir):
2346 return [], []
2346 return [], []
2347 if os.path.isdir(spec):
2347 if os.path.isdir(spec):
2348 spec += b'/'
2348 spec += b'/'
2349 spec = spec[len(rootdir) :]
2349 spec = spec[len(rootdir) :]
2350 fixpaths = pycompat.ossep != b'/'
2350 fixpaths = pycompat.ossep != b'/'
2351 if fixpaths:
2351 if fixpaths:
2352 spec = spec.replace(pycompat.ossep, b'/')
2352 spec = spec.replace(pycompat.ossep, b'/')
2353 speclen = len(spec)
2353 speclen = len(spec)
2354 fullpaths = opts['full']
2354 fullpaths = opts['full']
2355 files, dirs = set(), set()
2355 files, dirs = set(), set()
2356 adddir, addfile = dirs.add, files.add
2356 adddir, addfile = dirs.add, files.add
2357 for f, st in pycompat.iteritems(dirstate):
2357 for f, st in pycompat.iteritems(dirstate):
2358 if f.startswith(spec) and st[0] in acceptable:
2358 if f.startswith(spec) and st[0] in acceptable:
2359 if fixpaths:
2359 if fixpaths:
2360 f = f.replace(b'/', pycompat.ossep)
2360 f = f.replace(b'/', pycompat.ossep)
2361 if fullpaths:
2361 if fullpaths:
2362 addfile(f)
2362 addfile(f)
2363 continue
2363 continue
2364 s = f.find(pycompat.ossep, speclen)
2364 s = f.find(pycompat.ossep, speclen)
2365 if s >= 0:
2365 if s >= 0:
2366 adddir(f[:s])
2366 adddir(f[:s])
2367 else:
2367 else:
2368 addfile(f)
2368 addfile(f)
2369 return files, dirs
2369 return files, dirs
2370
2370
2371 acceptable = b''
2371 acceptable = b''
2372 if opts['normal']:
2372 if opts['normal']:
2373 acceptable += b'nm'
2373 acceptable += b'nm'
2374 if opts['added']:
2374 if opts['added']:
2375 acceptable += b'a'
2375 acceptable += b'a'
2376 if opts['removed']:
2376 if opts['removed']:
2377 acceptable += b'r'
2377 acceptable += b'r'
2378 cwd = repo.getcwd()
2378 cwd = repo.getcwd()
2379 if not specs:
2379 if not specs:
2380 specs = [b'.']
2380 specs = [b'.']
2381
2381
2382 files, dirs = set(), set()
2382 files, dirs = set(), set()
2383 for spec in specs:
2383 for spec in specs:
2384 f, d = complete(spec, acceptable or b'nmar')
2384 f, d = complete(spec, acceptable or b'nmar')
2385 files.update(f)
2385 files.update(f)
2386 dirs.update(d)
2386 dirs.update(d)
2387 files.update(dirs)
2387 files.update(dirs)
2388 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2388 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2389 ui.write(b'\n')
2389 ui.write(b'\n')
2390
2390
2391
2391
2392 @command(
2392 @command(
2393 b'debugpathcopies',
2393 b'debugpathcopies',
2394 cmdutil.walkopts,
2394 cmdutil.walkopts,
2395 b'hg debugpathcopies REV1 REV2 [FILE]',
2395 b'hg debugpathcopies REV1 REV2 [FILE]',
2396 inferrepo=True,
2396 inferrepo=True,
2397 )
2397 )
2398 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
2398 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
2399 """show copies between two revisions"""
2399 """show copies between two revisions"""
2400 ctx1 = scmutil.revsingle(repo, rev1)
2400 ctx1 = scmutil.revsingle(repo, rev1)
2401 ctx2 = scmutil.revsingle(repo, rev2)
2401 ctx2 = scmutil.revsingle(repo, rev2)
2402 m = scmutil.match(ctx1, pats, opts)
2402 m = scmutil.match(ctx1, pats, opts)
2403 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
2403 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
2404 ui.write(b'%s -> %s\n' % (src, dst))
2404 ui.write(b'%s -> %s\n' % (src, dst))
2405
2405
2406
2406
2407 @command(b'debugpeer', [], _(b'PATH'), norepo=True)
2407 @command(b'debugpeer', [], _(b'PATH'), norepo=True)
2408 def debugpeer(ui, path):
2408 def debugpeer(ui, path):
2409 """establish a connection to a peer repository"""
2409 """establish a connection to a peer repository"""
2410 # Always enable peer request logging. Requires --debug to display
2410 # Always enable peer request logging. Requires --debug to display
2411 # though.
2411 # though.
2412 overrides = {
2412 overrides = {
2413 (b'devel', b'debug.peer-request'): True,
2413 (b'devel', b'debug.peer-request'): True,
2414 }
2414 }
2415
2415
2416 with ui.configoverride(overrides):
2416 with ui.configoverride(overrides):
2417 peer = hg.peer(ui, {}, path)
2417 peer = hg.peer(ui, {}, path)
2418
2418
2419 local = peer.local() is not None
2419 local = peer.local() is not None
2420 canpush = peer.canpush()
2420 canpush = peer.canpush()
2421
2421
2422 ui.write(_(b'url: %s\n') % peer.url())
2422 ui.write(_(b'url: %s\n') % peer.url())
2423 ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
2423 ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
2424 ui.write(_(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no')))
2424 ui.write(_(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no')))
2425
2425
2426
2426
2427 @command(
2427 @command(
2428 b'debugpickmergetool',
2428 b'debugpickmergetool',
2429 [
2429 [
2430 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2430 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2431 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2431 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2432 ]
2432 ]
2433 + cmdutil.walkopts
2433 + cmdutil.walkopts
2434 + cmdutil.mergetoolopts,
2434 + cmdutil.mergetoolopts,
2435 _(b'[PATTERN]...'),
2435 _(b'[PATTERN]...'),
2436 inferrepo=True,
2436 inferrepo=True,
2437 )
2437 )
2438 def debugpickmergetool(ui, repo, *pats, **opts):
2438 def debugpickmergetool(ui, repo, *pats, **opts):
2439 """examine which merge tool is chosen for specified file
2439 """examine which merge tool is chosen for specified file
2440
2440
2441 As described in :hg:`help merge-tools`, Mercurial examines
2441 As described in :hg:`help merge-tools`, Mercurial examines
2442 configurations below in this order to decide which merge tool is
2442 configurations below in this order to decide which merge tool is
2443 chosen for the specified file.
2443 chosen for the specified file.
2444
2444
2445 1. ``--tool`` option
2445 1. ``--tool`` option
2446 2. ``HGMERGE`` environment variable
2446 2. ``HGMERGE`` environment variable
2447 3. configurations in ``merge-patterns`` section
2447 3. configurations in ``merge-patterns`` section
2448 4. configuration of ``ui.merge``
2448 4. configuration of ``ui.merge``
2449 5. configurations in ``merge-tools`` section
2449 5. configurations in ``merge-tools`` section
2450 6. ``hgmerge`` tool (for historical reasons only)
2450 6. ``hgmerge`` tool (for historical reasons only)
2451 7. default tool for fallback (``:merge`` or ``:prompt``)
2451 7. default tool for fallback (``:merge`` or ``:prompt``)
2452
2452
2453 This command writes out the examination result in the style below::
2453 This command writes out the examination result in the style below::
2454
2454
2455 FILE = MERGETOOL
2455 FILE = MERGETOOL
2456
2456
2457 By default, all files known in the first parent context of the
2457 By default, all files known in the first parent context of the
2458 working directory are examined. Use file patterns and/or -I/-X
2458 working directory are examined. Use file patterns and/or -I/-X
2459 options to limit target files. -r/--rev is also useful to examine
2459 options to limit target files. -r/--rev is also useful to examine
2460 files in another context without actually updating to it.
2460 files in another context without actually updating to it.
2461
2461
2462 With --debug, this command shows warning messages while matching
2462 With --debug, this command shows warning messages while matching
2463 against ``merge-patterns`` and so on, too. It is recommended to
2463 against ``merge-patterns`` and so on, too. It is recommended to
2464 use this option with explicit file patterns and/or -I/-X options,
2464 use this option with explicit file patterns and/or -I/-X options,
2465 because this option increases the amount of output per file according
2465 because this option increases the amount of output per file according
2466 to configurations in hgrc.
2466 to configurations in hgrc.
2467
2467
2468 With -v/--verbose, this command first shows the configurations
2468 With -v/--verbose, this command first shows the configurations
2469 below (only if specified).
2469 below (only if specified).
2470
2470
2471 - ``--tool`` option
2471 - ``--tool`` option
2472 - ``HGMERGE`` environment variable
2472 - ``HGMERGE`` environment variable
2473 - configuration of ``ui.merge``
2473 - configuration of ``ui.merge``
2474
2474
2475 If a merge tool is chosen before matching against
2475 If a merge tool is chosen before matching against
2476 ``merge-patterns``, this command can't show any helpful
2476 ``merge-patterns``, this command can't show any helpful
2477 information, even with --debug. In such a case, the information
2477 information, even with --debug. In such a case, the information
2478 above is useful for understanding why a merge tool was chosen.
2478 above is useful for understanding why a merge tool was chosen.
2479 """
2479 """
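# Hedged example (not in the original source): with `--tool :merge` on the
# command line, step 1 of the order documented above wins for every file,
# so each walked path would be reported as "<path> = :merge"; without it, a
# hypothetical `[merge-patterns] **.c = kdiff3` entry in the hgrc would be
# consulted at step 3, before ui.merge and the merge-tools section.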
2480 opts = pycompat.byteskwargs(opts)
2480 opts = pycompat.byteskwargs(opts)
2481 overrides = {}
2481 overrides = {}
2482 if opts[b'tool']:
2482 if opts[b'tool']:
2483 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
2483 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
2484 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
2484 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
2485
2485
2486 with ui.configoverride(overrides, b'debugmergepatterns'):
2486 with ui.configoverride(overrides, b'debugmergepatterns'):
2487 hgmerge = encoding.environ.get(b"HGMERGE")
2487 hgmerge = encoding.environ.get(b"HGMERGE")
2488 if hgmerge is not None:
2488 if hgmerge is not None:
2489 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
2489 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
2490 uimerge = ui.config(b"ui", b"merge")
2490 uimerge = ui.config(b"ui", b"merge")
2491 if uimerge:
2491 if uimerge:
2492 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
2492 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
2493
2493
2494 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2494 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2495 m = scmutil.match(ctx, pats, opts)
2495 m = scmutil.match(ctx, pats, opts)
2496 changedelete = opts[b'changedelete']
2496 changedelete = opts[b'changedelete']
2497 for path in ctx.walk(m):
2497 for path in ctx.walk(m):
2498 fctx = ctx[path]
2498 fctx = ctx[path]
2499 try:
2499 try:
2500 if not ui.debugflag:
2500 if not ui.debugflag:
2501 ui.pushbuffer(error=True)
2501 ui.pushbuffer(error=True)
2502 tool, toolpath = filemerge._picktool(
2502 tool, toolpath = filemerge._picktool(
2503 repo,
2503 repo,
2504 ui,
2504 ui,
2505 path,
2505 path,
2506 fctx.isbinary(),
2506 fctx.isbinary(),
2507 b'l' in fctx.flags(),
2507 b'l' in fctx.flags(),
2508 changedelete,
2508 changedelete,
2509 )
2509 )
2510 finally:
2510 finally:
2511 if not ui.debugflag:
2511 if not ui.debugflag:
2512 ui.popbuffer()
2512 ui.popbuffer()
2513 ui.write(b'%s = %s\n' % (path, tool))
2513 ui.write(b'%s = %s\n' % (path, tool))
2514
2514
2515
2515
2516 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2516 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2517 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2517 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2518 '''access the pushkey key/value protocol
2518 '''access the pushkey key/value protocol
2519
2519
2520 With two args, list the keys in the given namespace.
2520 With two args, list the keys in the given namespace.
2521
2521
2522 With five args, set a key to new if it currently is set to old.
2522 With five args, set a key to new if it currently is set to old.
2523 Reports success or failure.
2523 Reports success or failure.
2524 '''
2524 '''
2525
2525
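# Hedged usage sketch (not in the original source): with two arguments,
# e.g. `hg debugpushkey /path/to/repo bookmarks`, the listkeys branch below
# prints every key/value pair in that namespace; with five arguments the
# pushkey branch attempts the old -> new update and reports True or False.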
2526 target = hg.peer(ui, {}, repopath)
2526 target = hg.peer(ui, {}, repopath)
2527 if keyinfo:
2527 if keyinfo:
2528 key, old, new = keyinfo
2528 key, old, new = keyinfo
2529 with target.commandexecutor() as e:
2529 with target.commandexecutor() as e:
2530 r = e.callcommand(
2530 r = e.callcommand(
2531 b'pushkey',
2531 b'pushkey',
2532 {
2532 {
2533 b'namespace': namespace,
2533 b'namespace': namespace,
2534 b'key': key,
2534 b'key': key,
2535 b'old': old,
2535 b'old': old,
2536 b'new': new,
2536 b'new': new,
2537 },
2537 },
2538 ).result()
2538 ).result()
2539
2539
2540 ui.status(pycompat.bytestr(r) + b'\n')
2540 ui.status(pycompat.bytestr(r) + b'\n')
2541 return not r
2541 return not r
2542 else:
2542 else:
2543 for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))):
2543 for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))):
2544 ui.write(
2544 ui.write(
2545 b"%s\t%s\n" % (stringutil.escapestr(k), stringutil.escapestr(v))
2545 b"%s\t%s\n" % (stringutil.escapestr(k), stringutil.escapestr(v))
2546 )
2546 )
2547
2547
2548
2548
2549 @command(b'debugpvec', [], _(b'A B'))
2549 @command(b'debugpvec', [], _(b'A B'))
2550 def debugpvec(ui, repo, a, b=None):
2550 def debugpvec(ui, repo, a, b=None):
2551 ca = scmutil.revsingle(repo, a)
2551 ca = scmutil.revsingle(repo, a)
2552 cb = scmutil.revsingle(repo, b)
2552 cb = scmutil.revsingle(repo, b)
2553 pa = pvec.ctxpvec(ca)
2553 pa = pvec.ctxpvec(ca)
2554 pb = pvec.ctxpvec(cb)
2554 pb = pvec.ctxpvec(cb)
2555 if pa == pb:
2555 if pa == pb:
2556 rel = b"="
2556 rel = b"="
2557 elif pa > pb:
2557 elif pa > pb:
2558 rel = b">"
2558 rel = b">"
2559 elif pa < pb:
2559 elif pa < pb:
2560 rel = b"<"
2560 rel = b"<"
2561 elif pa | pb:
2561 elif pa | pb:
2562 rel = b"|"
2562 rel = b"|"
2563 ui.write(_(b"a: %s\n") % pa)
2563 ui.write(_(b"a: %s\n") % pa)
2564 ui.write(_(b"b: %s\n") % pb)
2564 ui.write(_(b"b: %s\n") % pb)
2565 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2565 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2566 ui.write(
2566 ui.write(
2567 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
2567 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
2568 % (
2568 % (
2569 abs(pa._depth - pb._depth),
2569 abs(pa._depth - pb._depth),
2570 pvec._hamming(pa._vec, pb._vec),
2570 pvec._hamming(pa._vec, pb._vec),
2571 pa.distance(pb),
2571 pa.distance(pb),
2572 rel,
2572 rel,
2573 )
2573 )
2574 )
2574 )
2575
2575
2576
2576
2577 @command(
2577 @command(
2578 b'debugrebuilddirstate|debugrebuildstate',
2578 b'debugrebuilddirstate|debugrebuildstate',
2579 [
2579 [
2580 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
2580 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
2581 (
2581 (
2582 b'',
2582 b'',
2583 b'minimal',
2583 b'minimal',
2584 None,
2584 None,
2585 _(
2585 _(
2586 b'only rebuild files that are inconsistent with '
2586 b'only rebuild files that are inconsistent with '
2587 b'the working copy parent'
2587 b'the working copy parent'
2588 ),
2588 ),
2589 ),
2589 ),
2590 ],
2590 ],
2591 _(b'[-r REV]'),
2591 _(b'[-r REV]'),
2592 )
2592 )
2593 def debugrebuilddirstate(ui, repo, rev, **opts):
2593 def debugrebuilddirstate(ui, repo, rev, **opts):
2594 """rebuild the dirstate as it would look like for the given revision
2594 """rebuild the dirstate as it would look like for the given revision
2595
2595
2596 If no revision is specified, the first current parent will be used.
2596 If no revision is specified, the first current parent will be used.
2597
2597
2598 The dirstate will be set to the files of the given revision.
2598 The dirstate will be set to the files of the given revision.
2599 The actual working directory content or existing dirstate
2599 The actual working directory content or existing dirstate
2600 information such as adds or removes is not considered.
2600 information such as adds or removes is not considered.
2601
2601
2602 ``minimal`` will only rebuild the dirstate status for files that claim to be
2602 ``minimal`` will only rebuild the dirstate status for files that claim to be
2603 tracked but are not in the parent manifest, or that exist in the parent
2603 tracked but are not in the parent manifest, or that exist in the parent
2604 manifest but are not in the dirstate. It will not change adds, removes, or
2604 manifest but are not in the dirstate. It will not change adds, removes, or
2605 modified files that are in the working copy parent.
2605 modified files that are in the working copy parent.
2606
2606
2607 One use of this command is to make the next :hg:`status` invocation
2607 One use of this command is to make the next :hg:`status` invocation
2608 check the actual file content.
2608 check the actual file content.
2609 """
2609 """
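# Hedged usage sketch (not in the original source): plain
# `hg debugrebuilddirstate` resets the dirstate to the manifest of the
# working copy's first parent, while `--minimal` (per the docstring above)
# only touches entries that disagree between the dirstate and that manifest.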
2610 ctx = scmutil.revsingle(repo, rev)
2610 ctx = scmutil.revsingle(repo, rev)
2611 with repo.wlock():
2611 with repo.wlock():
2612 dirstate = repo.dirstate
2612 dirstate = repo.dirstate
2613 changedfiles = None
2613 changedfiles = None
2614 # See command doc for what minimal does.
2614 # See command doc for what minimal does.
2615 if opts.get('minimal'):
2615 if opts.get('minimal'):
2616 manifestfiles = set(ctx.manifest().keys())
2616 manifestfiles = set(ctx.manifest().keys())
2617 dirstatefiles = set(dirstate)
2617 dirstatefiles = set(dirstate)
2618 manifestonly = manifestfiles - dirstatefiles
2618 manifestonly = manifestfiles - dirstatefiles
2619 dsonly = dirstatefiles - manifestfiles
2619 dsonly = dirstatefiles - manifestfiles
2620 dsnotadded = set(f for f in dsonly if dirstate[f] != b'a')
2620 dsnotadded = set(f for f in dsonly if dirstate[f] != b'a')
2621 changedfiles = manifestonly | dsnotadded
2621 changedfiles = manifestonly | dsnotadded
2622
2622
2623 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2623 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2624
2624
2625
2625
2626 @command(b'debugrebuildfncache', [], b'')
2626 @command(b'debugrebuildfncache', [], b'')
2627 def debugrebuildfncache(ui, repo):
2627 def debugrebuildfncache(ui, repo):
2628 """rebuild the fncache file"""
2628 """rebuild the fncache file"""
2629 repair.rebuildfncache(ui, repo)
2629 repair.rebuildfncache(ui, repo)
2630
2630
2631
2631
2632 @command(
2632 @command(
2633 b'debugrename',
2633 b'debugrename',
2634 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2634 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2635 _(b'[-r REV] [FILE]...'),
2635 _(b'[-r REV] [FILE]...'),
2636 )
2636 )
2637 def debugrename(ui, repo, *pats, **opts):
2637 def debugrename(ui, repo, *pats, **opts):
2638 """dump rename information"""
2638 """dump rename information"""
2639
2639
2640 opts = pycompat.byteskwargs(opts)
2640 opts = pycompat.byteskwargs(opts)
2641 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2641 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2642 m = scmutil.match(ctx, pats, opts)
2642 m = scmutil.match(ctx, pats, opts)
2643 for abs in ctx.walk(m):
2643 for abs in ctx.walk(m):
2644 fctx = ctx[abs]
2644 fctx = ctx[abs]
2645 o = fctx.filelog().renamed(fctx.filenode())
2645 o = fctx.filelog().renamed(fctx.filenode())
2646 rel = repo.pathto(abs)
2646 rel = repo.pathto(abs)
2647 if o:
2647 if o:
2648 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2648 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2649 else:
2649 else:
2650 ui.write(_(b"%s not renamed\n") % rel)
2650 ui.write(_(b"%s not renamed\n") % rel)
2651
2651
2652
2652
2653 @command(
2653 @command(
2654 b'debugrevlog',
2654 b'debugrevlog',
2655 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
2655 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
2656 _(b'-c|-m|FILE'),
2656 _(b'-c|-m|FILE'),
2657 optionalrepo=True,
2657 optionalrepo=True,
2658 )
2658 )
2659 def debugrevlog(ui, repo, file_=None, **opts):
2659 def debugrevlog(ui, repo, file_=None, **opts):
2660 """show data and statistics about a revlog"""
2660 """show data and statistics about a revlog"""
2661 opts = pycompat.byteskwargs(opts)
2661 opts = pycompat.byteskwargs(opts)
2662 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
2662 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
2663
2663
2664 if opts.get(b"dump"):
2664 if opts.get(b"dump"):
2665 numrevs = len(r)
2665 numrevs = len(r)
2666 ui.write(
2666 ui.write(
2667 (
2667 (
2668 b"# rev p1rev p2rev start end deltastart base p1 p2"
2668 b"# rev p1rev p2rev start end deltastart base p1 p2"
2669 b" rawsize totalsize compression heads chainlen\n"
2669 b" rawsize totalsize compression heads chainlen\n"
2670 )
2670 )
2671 )
2671 )
2672 ts = 0
2672 ts = 0
2673 heads = set()
2673 heads = set()
2674
2674
2675 for rev in pycompat.xrange(numrevs):
2675 for rev in pycompat.xrange(numrevs):
2676 dbase = r.deltaparent(rev)
2676 dbase = r.deltaparent(rev)
2677 if dbase == -1:
2677 if dbase == -1:
2678 dbase = rev
2678 dbase = rev
2679 cbase = r.chainbase(rev)
2679 cbase = r.chainbase(rev)
2680 clen = r.chainlen(rev)
2680 clen = r.chainlen(rev)
2681 p1, p2 = r.parentrevs(rev)
2681 p1, p2 = r.parentrevs(rev)
2682 rs = r.rawsize(rev)
2682 rs = r.rawsize(rev)
2683 ts = ts + rs
2683 ts = ts + rs
2684 heads -= set(r.parentrevs(rev))
2684 heads -= set(r.parentrevs(rev))
2685 heads.add(rev)
2685 heads.add(rev)
2686 try:
2686 try:
2687 compression = ts / r.end(rev)
2687 compression = ts / r.end(rev)
2688 except ZeroDivisionError:
2688 except ZeroDivisionError:
2689 compression = 0
2689 compression = 0
2690 ui.write(
2690 ui.write(
2691 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2691 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2692 b"%11d %5d %8d\n"
2692 b"%11d %5d %8d\n"
2693 % (
2693 % (
2694 rev,
2694 rev,
2695 p1,
2695 p1,
2696 p2,
2696 p2,
2697 r.start(rev),
2697 r.start(rev),
2698 r.end(rev),
2698 r.end(rev),
2699 r.start(dbase),
2699 r.start(dbase),
2700 r.start(cbase),
2700 r.start(cbase),
2701 r.start(p1),
2701 r.start(p1),
2702 r.start(p2),
2702 r.start(p2),
2703 rs,
2703 rs,
2704 ts,
2704 ts,
2705 compression,
2705 compression,
2706 len(heads),
2706 len(heads),
2707 clen,
2707 clen,
2708 )
2708 )
2709 )
2709 )
2710 return 0
2710 return 0
2711
2711
2712 v = r.version
2712 v = r.version
2713 format = v & 0xFFFF
2713 format = v & 0xFFFF
2714 flags = []
2714 flags = []
2715 gdelta = False
2715 gdelta = False
2716 if v & revlog.FLAG_INLINE_DATA:
2716 if v & revlog.FLAG_INLINE_DATA:
2717 flags.append(b'inline')
2717 flags.append(b'inline')
2718 if v & revlog.FLAG_GENERALDELTA:
2718 if v & revlog.FLAG_GENERALDELTA:
2719 gdelta = True
2719 gdelta = True
2720 flags.append(b'generaldelta')
2720 flags.append(b'generaldelta')
2721 if not flags:
2721 if not flags:
2722 flags = [b'(none)']
2722 flags = [b'(none)']
2723
2723
2724 ### tracks merge vs single parent
2724 ### tracks merge vs single parent
2725 nummerges = 0
2725 nummerges = 0
2726
2726
2727 ### tracks the ways the deltas are built
2727 ### tracks the ways the deltas are built
2728 # nodelta
2728 # nodelta
2729 numempty = 0
2729 numempty = 0
2730 numemptytext = 0
2730 numemptytext = 0
2731 numemptydelta = 0
2731 numemptydelta = 0
2732 # full file content
2732 # full file content
2733 numfull = 0
2733 numfull = 0
2734 # intermediate snapshot against a prior snapshot
2734 # intermediate snapshot against a prior snapshot
2735 numsemi = 0
2735 numsemi = 0
2736 # snapshot count per depth
2736 # snapshot count per depth
2737 numsnapdepth = collections.defaultdict(lambda: 0)
2737 numsnapdepth = collections.defaultdict(lambda: 0)
2738 # delta against previous revision
2738 # delta against previous revision
2739 numprev = 0
2739 numprev = 0
2740 # delta against first or second parent (not prev)
2740 # delta against first or second parent (not prev)
2741 nump1 = 0
2741 nump1 = 0
2742 nump2 = 0
2742 nump2 = 0
2743 # delta against neither prev nor parents
2743 # delta against neither prev nor parents
2744 numother = 0
2744 numother = 0
2745 # delta against prev that are also first or second parent
2745 # delta against prev that are also first or second parent
2746 # (details of `numprev`)
2746 # (details of `numprev`)
2747 nump1prev = 0
2747 nump1prev = 0
2748 nump2prev = 0
2748 nump2prev = 0
2749
2749
2750 # data about delta chain of each revs
2750 # data about delta chain of each revs
2751 chainlengths = []
2751 chainlengths = []
2752 chainbases = []
2752 chainbases = []
2753 chainspans = []
2753 chainspans = []
2754
2754
2755 # data about each revision
2755 # data about each revision
2756 datasize = [None, 0, 0]
2756 datasize = [None, 0, 0]
2757 fullsize = [None, 0, 0]
2757 fullsize = [None, 0, 0]
2758 semisize = [None, 0, 0]
2758 semisize = [None, 0, 0]
2759 # snapshot count per depth
2759 # snapshot count per depth
2760 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2760 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2761 deltasize = [None, 0, 0]
2761 deltasize = [None, 0, 0]
2762 chunktypecounts = {}
2762 chunktypecounts = {}
2763 chunktypesizes = {}
2763 chunktypesizes = {}
2764
2764
2765 def addsize(size, l):
2765 def addsize(size, l):
2766 if l[0] is None or size < l[0]:
2766 if l[0] is None or size < l[0]:
2767 l[0] = size
2767 l[0] = size
2768 if size > l[1]:
2768 if size > l[1]:
2769 l[1] = size
2769 l[1] = size
2770 l[2] += size
2770 l[2] += size
2771
2771
2772 numrevs = len(r)
2772 numrevs = len(r)
2773 for rev in pycompat.xrange(numrevs):
2773 for rev in pycompat.xrange(numrevs):
2774 p1, p2 = r.parentrevs(rev)
2774 p1, p2 = r.parentrevs(rev)
2775 delta = r.deltaparent(rev)
2775 delta = r.deltaparent(rev)
2776 if format > 0:
2776 if format > 0:
2777 addsize(r.rawsize(rev), datasize)
2777 addsize(r.rawsize(rev), datasize)
2778 if p2 != nullrev:
2778 if p2 != nullrev:
2779 nummerges += 1
2779 nummerges += 1
2780 size = r.length(rev)
2780 size = r.length(rev)
2781 if delta == nullrev:
2781 if delta == nullrev:
2782 chainlengths.append(0)
2782 chainlengths.append(0)
2783 chainbases.append(r.start(rev))
2783 chainbases.append(r.start(rev))
2784 chainspans.append(size)
2784 chainspans.append(size)
2785 if size == 0:
2785 if size == 0:
2786 numempty += 1
2786 numempty += 1
2787 numemptytext += 1
2787 numemptytext += 1
2788 else:
2788 else:
2789 numfull += 1
2789 numfull += 1
2790 numsnapdepth[0] += 1
2790 numsnapdepth[0] += 1
2791 addsize(size, fullsize)
2791 addsize(size, fullsize)
2792 addsize(size, snapsizedepth[0])
2792 addsize(size, snapsizedepth[0])
2793 else:
2793 else:
2794 chainlengths.append(chainlengths[delta] + 1)
2794 chainlengths.append(chainlengths[delta] + 1)
2795 baseaddr = chainbases[delta]
2795 baseaddr = chainbases[delta]
2796 revaddr = r.start(rev)
2796 revaddr = r.start(rev)
2797 chainbases.append(baseaddr)
2797 chainbases.append(baseaddr)
2798 chainspans.append((revaddr - baseaddr) + size)
2798 chainspans.append((revaddr - baseaddr) + size)
2799 if size == 0:
2799 if size == 0:
2800 numempty += 1
2800 numempty += 1
2801 numemptydelta += 1
2801 numemptydelta += 1
2802 elif r.issnapshot(rev):
2802 elif r.issnapshot(rev):
2803 addsize(size, semisize)
2803 addsize(size, semisize)
2804 numsemi += 1
2804 numsemi += 1
2805 depth = r.snapshotdepth(rev)
2805 depth = r.snapshotdepth(rev)
2806 numsnapdepth[depth] += 1
2806 numsnapdepth[depth] += 1
2807 addsize(size, snapsizedepth[depth])
2807 addsize(size, snapsizedepth[depth])
2808 else:
2808 else:
2809 addsize(size, deltasize)
2809 addsize(size, deltasize)
2810 if delta == rev - 1:
2810 if delta == rev - 1:
2811 numprev += 1
2811 numprev += 1
2812 if delta == p1:
2812 if delta == p1:
2813 nump1prev += 1
2813 nump1prev += 1
2814 elif delta == p2:
2814 elif delta == p2:
2815 nump2prev += 1
2815 nump2prev += 1
2816 elif delta == p1:
2816 elif delta == p1:
2817 nump1 += 1
2817 nump1 += 1
2818 elif delta == p2:
2818 elif delta == p2:
2819 nump2 += 1
2819 nump2 += 1
2820 elif delta != nullrev:
2820 elif delta != nullrev:
2821 numother += 1
2821 numother += 1
2822
2822
2823 # Obtain data on the raw chunks in the revlog.
2823 # Obtain data on the raw chunks in the revlog.
2824 if util.safehasattr(r, b'_getsegmentforrevs'):
2824 if util.safehasattr(r, b'_getsegmentforrevs'):
2825 segment = r._getsegmentforrevs(rev, rev)[1]
2825 segment = r._getsegmentforrevs(rev, rev)[1]
2826 else:
2826 else:
2827 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2827 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2828 if segment:
2828 if segment:
2829 chunktype = bytes(segment[0:1])
2829 chunktype = bytes(segment[0:1])
2830 else:
2830 else:
2831 chunktype = b'empty'
2831 chunktype = b'empty'
2832
2832
2833 if chunktype not in chunktypecounts:
2833 if chunktype not in chunktypecounts:
2834 chunktypecounts[chunktype] = 0
2834 chunktypecounts[chunktype] = 0
2835 chunktypesizes[chunktype] = 0
2835 chunktypesizes[chunktype] = 0
2836
2836
2837 chunktypecounts[chunktype] += 1
2837 chunktypecounts[chunktype] += 1
2838 chunktypesizes[chunktype] += size
2838 chunktypesizes[chunktype] += size
2839
2839
2840 # Adjust size min value for empty cases
2840 # Adjust size min value for empty cases
2841 for size in (datasize, fullsize, semisize, deltasize):
2841 for size in (datasize, fullsize, semisize, deltasize):
2842 if size[0] is None:
2842 if size[0] is None:
2843 size[0] = 0
2843 size[0] = 0
2844
2844
2845 numdeltas = numrevs - numfull - numempty - numsemi
2845 numdeltas = numrevs - numfull - numempty - numsemi
2846 numoprev = numprev - nump1prev - nump2prev
2846 numoprev = numprev - nump1prev - nump2prev
2847 totalrawsize = datasize[2]
2847 totalrawsize = datasize[2]
2848 datasize[2] /= numrevs
2848 datasize[2] /= numrevs
2849 fulltotal = fullsize[2]
2849 fulltotal = fullsize[2]
2850 if numfull == 0:
2850 if numfull == 0:
2851 fullsize[2] = 0
2851 fullsize[2] = 0
2852 else:
2852 else:
2853 fullsize[2] /= numfull
2853 fullsize[2] /= numfull
2854 semitotal = semisize[2]
2854 semitotal = semisize[2]
2855 snaptotal = {}
2855 snaptotal = {}
2856 if numsemi > 0:
2856 if numsemi > 0:
2857 semisize[2] /= numsemi
2857 semisize[2] /= numsemi
2858 for depth in snapsizedepth:
2858 for depth in snapsizedepth:
2859 snaptotal[depth] = snapsizedepth[depth][2]
2859 snaptotal[depth] = snapsizedepth[depth][2]
2860 snapsizedepth[depth][2] /= numsnapdepth[depth]
2860 snapsizedepth[depth][2] /= numsnapdepth[depth]
2861
2861
2862 deltatotal = deltasize[2]
2862 deltatotal = deltasize[2]
2863 if numdeltas > 0:
2863 if numdeltas > 0:
2864 deltasize[2] /= numdeltas
2864 deltasize[2] /= numdeltas
2865 totalsize = fulltotal + semitotal + deltatotal
2865 totalsize = fulltotal + semitotal + deltatotal
2866 avgchainlen = sum(chainlengths) / numrevs
2866 avgchainlen = sum(chainlengths) / numrevs
2867 maxchainlen = max(chainlengths)
2867 maxchainlen = max(chainlengths)
2868 maxchainspan = max(chainspans)
2868 maxchainspan = max(chainspans)
2869 compratio = 1
2869 compratio = 1
2870 if totalsize:
2870 if totalsize:
2871 compratio = totalrawsize / totalsize
2871 compratio = totalrawsize / totalsize
2872
2872
2873 basedfmtstr = b'%%%dd\n'
2873 basedfmtstr = b'%%%dd\n'
2874 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
2874 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
2875
2875
2876 def dfmtstr(max):
2876 def dfmtstr(max):
2877 return basedfmtstr % len(str(max))
2877 return basedfmtstr % len(str(max))
2878
2878
2879 def pcfmtstr(max, padding=0):
2879 def pcfmtstr(max, padding=0):
2880 return basepcfmtstr % (len(str(max)), b' ' * padding)
2880 return basepcfmtstr % (len(str(max)), b' ' * padding)
2881
2881
2882 def pcfmt(value, total):
2882 def pcfmt(value, total):
2883 if total:
2883 if total:
2884 return (value, 100 * float(value) / total)
2884 return (value, 100 * float(value) / total)
2885 else:
2885 else:
2886 return value, 100.0
2886 return value, 100.0
2887
2887
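# Worked example (not part of the original code) of the helpers above:
# pcfmtstr(1234) builds b'%4d (%5.2f%%)\n' because len(str(1234)) == 4,
# and pcfmt(250, 1000) returns (250, 25.0), so the combination renders as
# " 250 (25.00%)\n"; dfmtstr(1234) is just b'%4d\n'.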
2888 ui.writenoi18n(b'format : %d\n' % format)
2888 ui.writenoi18n(b'format : %d\n' % format)
2889 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
2889 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
2890
2890
2891 ui.write(b'\n')
2891 ui.write(b'\n')
2892 fmt = pcfmtstr(totalsize)
2892 fmt = pcfmtstr(totalsize)
2893 fmt2 = dfmtstr(totalsize)
2893 fmt2 = dfmtstr(totalsize)
2894 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2894 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2895 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
2895 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
2896 ui.writenoi18n(
2896 ui.writenoi18n(
2897 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
2897 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
2898 )
2898 )
2899 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2899 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2900 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
2900 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
2901 ui.writenoi18n(
2901 ui.writenoi18n(
2902 b' text : '
2902 b' text : '
2903 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
2903 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
2904 )
2904 )
2905 ui.writenoi18n(
2905 ui.writenoi18n(
2906 b' delta : '
2906 b' delta : '
2907 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
2907 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
2908 )
2908 )
2909 ui.writenoi18n(
2909 ui.writenoi18n(
2910 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
2910 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
2911 )
2911 )
2912 for depth in sorted(numsnapdepth):
2912 for depth in sorted(numsnapdepth):
2913 ui.write(
2913 ui.write(
2914 (b' lvl-%-3d : ' % depth)
2914 (b' lvl-%-3d : ' % depth)
2915 + fmt % pcfmt(numsnapdepth[depth], numrevs)
2915 + fmt % pcfmt(numsnapdepth[depth], numrevs)
2916 )
2916 )
2917 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
2917 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
2918 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
2918 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
2919 ui.writenoi18n(
2919 ui.writenoi18n(
2920 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
2920 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
2921 )
2921 )
2922 for depth in sorted(numsnapdepth):
2922 for depth in sorted(numsnapdepth):
2923 ui.write(
2923 ui.write(
2924 (b' lvl-%-3d : ' % depth)
2924 (b' lvl-%-3d : ' % depth)
2925 + fmt % pcfmt(snaptotal[depth], totalsize)
2925 + fmt % pcfmt(snaptotal[depth], totalsize)
2926 )
2926 )
2927 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
2927 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
2928
2928
2929 def fmtchunktype(chunktype):
2929 def fmtchunktype(chunktype):
2930 if chunktype == b'empty':
2930 if chunktype == b'empty':
2931 return b' %s : ' % chunktype
2931 return b' %s : ' % chunktype
2932 elif chunktype in pycompat.bytestr(string.ascii_letters):
2932 elif chunktype in pycompat.bytestr(string.ascii_letters):
2933 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2933 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2934 else:
2934 else:
2935 return b' 0x%s : ' % hex(chunktype)
2935 return b' 0x%s : ' % hex(chunktype)
2936
2936
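# Worked example (not part of the original code): a chunk whose first byte
# is b'u' (revlog's marker for an uncompressed chunk) is labelled by
# fmtchunktype() above as "0x75 (u)", a zlib-compressed chunk typically
# starts with b'x' and shows as "0x78 (x)", and an empty segment falls back
# to the plain "empty" label.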
2937 ui.write(b'\n')
2937 ui.write(b'\n')
2938 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
2938 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
2939 for chunktype in sorted(chunktypecounts):
2939 for chunktype in sorted(chunktypecounts):
2940 ui.write(fmtchunktype(chunktype))
2940 ui.write(fmtchunktype(chunktype))
2941 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2941 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2942 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
2942 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
2943 for chunktype in sorted(chunktypecounts):
2943 for chunktype in sorted(chunktypecounts):
2944 ui.write(fmtchunktype(chunktype))
2944 ui.write(fmtchunktype(chunktype))
2945 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2945 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2946
2946
2947 ui.write(b'\n')
2947 ui.write(b'\n')
2948 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2948 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2949 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
2949 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
2950 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
2950 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
2951 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
2951 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
2952 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
2952 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
2953
2953
2954 if format > 0:
2954 if format > 0:
2955 ui.write(b'\n')
2955 ui.write(b'\n')
2956 ui.writenoi18n(
2956 ui.writenoi18n(
2957 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
2957 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
2958 % tuple(datasize)
2958 % tuple(datasize)
2959 )
2959 )
2960 ui.writenoi18n(
2960 ui.writenoi18n(
2961 b'full revision size (min/max/avg) : %d / %d / %d\n'
2961 b'full revision size (min/max/avg) : %d / %d / %d\n'
2962 % tuple(fullsize)
2962 % tuple(fullsize)
2963 )
2963 )
2964 ui.writenoi18n(
2964 ui.writenoi18n(
2965 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
2965 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
2966 % tuple(semisize)
2966 % tuple(semisize)
2967 )
2967 )
2968 for depth in sorted(snapsizedepth):
2968 for depth in sorted(snapsizedepth):
2969 if depth == 0:
2969 if depth == 0:
2970 continue
2970 continue
2971 ui.writenoi18n(
2971 ui.writenoi18n(
2972 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
2972 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
2973 % ((depth,) + tuple(snapsizedepth[depth]))
2973 % ((depth,) + tuple(snapsizedepth[depth]))
2974 )
2974 )
2975 ui.writenoi18n(
2975 ui.writenoi18n(
2976 b'delta size (min/max/avg) : %d / %d / %d\n'
2976 b'delta size (min/max/avg) : %d / %d / %d\n'
2977 % tuple(deltasize)
2977 % tuple(deltasize)
2978 )
2978 )
2979
2979
2980 if numdeltas > 0:
2980 if numdeltas > 0:
2981 ui.write(b'\n')
2981 ui.write(b'\n')
2982 fmt = pcfmtstr(numdeltas)
2982 fmt = pcfmtstr(numdeltas)
2983 fmt2 = pcfmtstr(numdeltas, 4)
2983 fmt2 = pcfmtstr(numdeltas, 4)
2984 ui.writenoi18n(
2984 ui.writenoi18n(
2985 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
2985 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
2986 )
2986 )
2987 if numprev > 0:
2987 if numprev > 0:
2988 ui.writenoi18n(
2988 ui.writenoi18n(
2989 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
2989 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
2990 )
2990 )
2991 ui.writenoi18n(
2991 ui.writenoi18n(
2992 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
2992 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
2993 )
2993 )
2994 ui.writenoi18n(
2994 ui.writenoi18n(
2995 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
2995 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
2996 )
2996 )
2997 if gdelta:
2997 if gdelta:
2998 ui.writenoi18n(
2998 ui.writenoi18n(
2999 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
2999 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3000 )
3000 )
3001 ui.writenoi18n(
3001 ui.writenoi18n(
3002 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3002 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3003 )
3003 )
3004 ui.writenoi18n(
3004 ui.writenoi18n(
3005 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3005 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3006 )
3006 )
3007
3007
3008
3008
3009 @command(
3009 @command(
3010 b'debugrevlogindex',
3010 b'debugrevlogindex',
3011 cmdutil.debugrevlogopts
3011 cmdutil.debugrevlogopts
3012 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
3012 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
3013 _(b'[-f FORMAT] -c|-m|FILE'),
3013 _(b'[-f FORMAT] -c|-m|FILE'),
3014 optionalrepo=True,
3014 optionalrepo=True,
3015 )
3015 )
3016 def debugrevlogindex(ui, repo, file_=None, **opts):
3016 def debugrevlogindex(ui, repo, file_=None, **opts):
3017 """dump the contents of a revlog index"""
3017 """dump the contents of a revlog index"""
3018 opts = pycompat.byteskwargs(opts)
3018 opts = pycompat.byteskwargs(opts)
3019 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
3019 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
3020 format = opts.get(b'format', 0)
3020 format = opts.get(b'format', 0)
3021 if format not in (0, 1):
3021 if format not in (0, 1):
3022 raise error.Abort(_(b"unknown format %d") % format)
3022 raise error.Abort(_(b"unknown format %d") % format)
3023
3023
3024 if ui.debugflag:
3024 if ui.debugflag:
3025 shortfn = hex
3025 shortfn = hex
3026 else:
3026 else:
3027 shortfn = short
3027 shortfn = short
3028
3028
3029 # There might not be anything in r, so have a sane default
3029 # There might not be anything in r, so have a sane default
3030 idlen = 12
3030 idlen = 12
3031 for i in r:
3031 for i in r:
3032 idlen = len(shortfn(r.node(i)))
3032 idlen = len(shortfn(r.node(i)))
3033 break
3033 break
3034
3034
3035 if format == 0:
3035 if format == 0:
3036 if ui.verbose:
3036 if ui.verbose:
3037 ui.writenoi18n(
3037 ui.writenoi18n(
3038 b" rev offset length linkrev %s %s p2\n"
3038 b" rev offset length linkrev %s %s p2\n"
3039 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3039 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3040 )
3040 )
3041 else:
3041 else:
3042 ui.writenoi18n(
3042 ui.writenoi18n(
3043 b" rev linkrev %s %s p2\n"
3043 b" rev linkrev %s %s p2\n"
3044 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3044 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3045 )
3045 )
3046 elif format == 1:
3046 elif format == 1:
3047 if ui.verbose:
3047 if ui.verbose:
3048 ui.writenoi18n(
3048 ui.writenoi18n(
3049 (
3049 (
3050 b" rev flag offset length size link p1"
3050 b" rev flag offset length size link p1"
3051 b" p2 %s\n"
3051 b" p2 %s\n"
3052 )
3052 )
3053 % b"nodeid".rjust(idlen)
3053 % b"nodeid".rjust(idlen)
3054 )
3054 )
3055 else:
3055 else:
3056 ui.writenoi18n(
3056 ui.writenoi18n(
3057 b" rev flag size link p1 p2 %s\n"
3057 b" rev flag size link p1 p2 %s\n"
3058 % b"nodeid".rjust(idlen)
3058 % b"nodeid".rjust(idlen)
3059 )
3059 )
3060
3060
3061 for i in r:
3061 for i in r:
3062 node = r.node(i)
3062 node = r.node(i)
3063 if format == 0:
3063 if format == 0:
3064 try:
3064 try:
3065 pp = r.parents(node)
3065 pp = r.parents(node)
3066 except Exception:
3066 except Exception:
3067 pp = [nullid, nullid]
3067 pp = [nullid, nullid]
3068 if ui.verbose:
3068 if ui.verbose:
3069 ui.write(
3069 ui.write(
3070 b"% 6d % 9d % 7d % 7d %s %s %s\n"
3070 b"% 6d % 9d % 7d % 7d %s %s %s\n"
3071 % (
3071 % (
3072 i,
3072 i,
3073 r.start(i),
3073 r.start(i),
3074 r.length(i),
3074 r.length(i),
3075 r.linkrev(i),
3075 r.linkrev(i),
3076 shortfn(node),
3076 shortfn(node),
3077 shortfn(pp[0]),
3077 shortfn(pp[0]),
3078 shortfn(pp[1]),
3078 shortfn(pp[1]),
3079 )
3079 )
3080 )
3080 )
3081 else:
3081 else:
3082 ui.write(
3082 ui.write(
3083 b"% 6d % 7d %s %s %s\n"
3083 b"% 6d % 7d %s %s %s\n"
3084 % (
3084 % (
3085 i,
3085 i,
3086 r.linkrev(i),
3086 r.linkrev(i),
3087 shortfn(node),
3087 shortfn(node),
3088 shortfn(pp[0]),
3088 shortfn(pp[0]),
3089 shortfn(pp[1]),
3089 shortfn(pp[1]),
3090 )
3090 )
3091 )
3091 )
3092 elif format == 1:
3092 elif format == 1:
3093 pr = r.parentrevs(i)
3093 pr = r.parentrevs(i)
3094 if ui.verbose:
3094 if ui.verbose:
3095 ui.write(
3095 ui.write(
3096 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3096 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3097 % (
3097 % (
3098 i,
3098 i,
3099 r.flags(i),
3099 r.flags(i),
3100 r.start(i),
3100 r.start(i),
3101 r.length(i),
3101 r.length(i),
3102 r.rawsize(i),
3102 r.rawsize(i),
3103 r.linkrev(i),
3103 r.linkrev(i),
3104 pr[0],
3104 pr[0],
3105 pr[1],
3105 pr[1],
3106 shortfn(node),
3106 shortfn(node),
3107 )
3107 )
3108 )
3108 )
3109 else:
3109 else:
3110 ui.write(
3110 ui.write(
3111 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3111 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3112 % (
3112 % (
3113 i,
3113 i,
3114 r.flags(i),
3114 r.flags(i),
3115 r.rawsize(i),
3115 r.rawsize(i),
3116 r.linkrev(i),
3116 r.linkrev(i),
3117 pr[0],
3117 pr[0],
3118 pr[1],
3118 pr[1],
3119 shortfn(node),
3119 shortfn(node),
3120 )
3120 )
3121 )
3121 )
3122
3122
3123
3123
3124 @command(
3124 @command(
3125 b'debugrevspec',
3125 b'debugrevspec',
3126 [
3126 [
3127 (
3127 (
3128 b'',
3128 b'',
3129 b'optimize',
3129 b'optimize',
3130 None,
3130 None,
3131 _(b'print parsed tree after optimizing (DEPRECATED)'),
3131 _(b'print parsed tree after optimizing (DEPRECATED)'),
3132 ),
3132 ),
3133 (
3133 (
3134 b'',
3134 b'',
3135 b'show-revs',
3135 b'show-revs',
3136 True,
3136 True,
3137 _(b'print list of result revisions (default)'),
3137 _(b'print list of result revisions (default)'),
3138 ),
3138 ),
3139 (
3139 (
3140 b's',
3140 b's',
3141 b'show-set',
3141 b'show-set',
3142 None,
3142 None,
3143 _(b'print internal representation of result set'),
3143 _(b'print internal representation of result set'),
3144 ),
3144 ),
3145 (
3145 (
3146 b'p',
3146 b'p',
3147 b'show-stage',
3147 b'show-stage',
3148 [],
3148 [],
3149 _(b'print parsed tree at the given stage'),
3149 _(b'print parsed tree at the given stage'),
3150 _(b'NAME'),
3150 _(b'NAME'),
3151 ),
3151 ),
3152 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3152 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3153 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3153 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3154 ],
3154 ],
3155 b'REVSPEC',
3155 b'REVSPEC',
3156 )
3156 )
3157 def debugrevspec(ui, repo, expr, **opts):
3157 def debugrevspec(ui, repo, expr, **opts):
3158 """parse and apply a revision specification
3158 """parse and apply a revision specification
3159
3159
3160 Use -p/--show-stage option to print the parsed tree at the given stages.
3160 Use -p/--show-stage option to print the parsed tree at the given stages.
3161 Use -p all to print tree at every stage.
3161 Use -p all to print tree at every stage.
3162
3162
3163 Use --no-show-revs option with -s or -p to print only the set
3163 Use --no-show-revs option with -s or -p to print only the set
3164 representation or the parsed tree respectively.
3164 representation or the parsed tree respectively.
3165
3165
3166 Use --verify-optimized to compare the optimized result with the unoptimized
3166 Use --verify-optimized to compare the optimized result with the unoptimized
3167 one. Returns 1 if the optimized result differs.
3167 one. Returns 1 if the optimized result differs.
3168 """
3168 """
3169 opts = pycompat.byteskwargs(opts)
3169 opts = pycompat.byteskwargs(opts)
3170 aliases = ui.configitems(b'revsetalias')
3170 aliases = ui.configitems(b'revsetalias')
3171 stages = [
3171 stages = [
3172 (b'parsed', lambda tree: tree),
3172 (b'parsed', lambda tree: tree),
3173 (
3173 (
3174 b'expanded',
3174 b'expanded',
3175 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3175 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3176 ),
3176 ),
3177 (b'concatenated', revsetlang.foldconcat),
3177 (b'concatenated', revsetlang.foldconcat),
3178 (b'analyzed', revsetlang.analyze),
3178 (b'analyzed', revsetlang.analyze),
3179 (b'optimized', revsetlang.optimize),
3179 (b'optimized', revsetlang.optimize),
3180 ]
3180 ]
3181 if opts[b'no_optimized']:
3181 if opts[b'no_optimized']:
3182 stages = stages[:-1]
3182 stages = stages[:-1]
3183 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3183 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3184 raise error.Abort(
3184 raise error.Abort(
3185 _(b'cannot use --verify-optimized with --no-optimized')
3185 _(b'cannot use --verify-optimized with --no-optimized')
3186 )
3186 )
3187 stagenames = set(n for n, f in stages)
3187 stagenames = set(n for n, f in stages)
3188
3188
3189 showalways = set()
3189 showalways = set()
3190 showchanged = set()
3190 showchanged = set()
3191 if ui.verbose and not opts[b'show_stage']:
3191 if ui.verbose and not opts[b'show_stage']:
3192 # show parsed tree by --verbose (deprecated)
3192 # show parsed tree by --verbose (deprecated)
3193 showalways.add(b'parsed')
3193 showalways.add(b'parsed')
3194 showchanged.update([b'expanded', b'concatenated'])
3194 showchanged.update([b'expanded', b'concatenated'])
3195 if opts[b'optimize']:
3195 if opts[b'optimize']:
3196 showalways.add(b'optimized')
3196 showalways.add(b'optimized')
3197 if opts[b'show_stage'] and opts[b'optimize']:
3197 if opts[b'show_stage'] and opts[b'optimize']:
3198 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3198 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3199 if opts[b'show_stage'] == [b'all']:
3199 if opts[b'show_stage'] == [b'all']:
3200 showalways.update(stagenames)
3200 showalways.update(stagenames)
3201 else:
3201 else:
3202 for n in opts[b'show_stage']:
3202 for n in opts[b'show_stage']:
3203 if n not in stagenames:
3203 if n not in stagenames:
3204 raise error.Abort(_(b'invalid stage name: %s') % n)
3204 raise error.Abort(_(b'invalid stage name: %s') % n)
3205 showalways.update(opts[b'show_stage'])
3205 showalways.update(opts[b'show_stage'])
3206
3206
3207 treebystage = {}
3207 treebystage = {}
3208 printedtree = None
3208 printedtree = None
3209 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3209 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3210 for n, f in stages:
3210 for n, f in stages:
3211 treebystage[n] = tree = f(tree)
3211 treebystage[n] = tree = f(tree)
3212 if n in showalways or (n in showchanged and tree != printedtree):
3212 if n in showalways or (n in showchanged and tree != printedtree):
3213 if opts[b'show_stage'] or n != b'parsed':
3213 if opts[b'show_stage'] or n != b'parsed':
3214 ui.write(b"* %s:\n" % n)
3214 ui.write(b"* %s:\n" % n)
3215 ui.write(revsetlang.prettyformat(tree), b"\n")
3215 ui.write(revsetlang.prettyformat(tree), b"\n")
3216 printedtree = tree
3216 printedtree = tree
3217
3217
3218 if opts[b'verify_optimized']:
3218 if opts[b'verify_optimized']:
3219 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3219 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3220 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3220 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3221 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3221 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3222 ui.writenoi18n(
3222 ui.writenoi18n(
3223 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3223 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3224 )
3224 )
3225 ui.writenoi18n(
3225 ui.writenoi18n(
3226 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3226 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3227 )
3227 )
3228 arevs = list(arevs)
3228 arevs = list(arevs)
3229 brevs = list(brevs)
3229 brevs = list(brevs)
3230 if arevs == brevs:
3230 if arevs == brevs:
3231 return 0
3231 return 0
3232 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3232 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3233 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3233 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3234 sm = difflib.SequenceMatcher(None, arevs, brevs)
3234 sm = difflib.SequenceMatcher(None, arevs, brevs)
3235 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3235 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3236 if tag in ('delete', 'replace'):
3236 if tag in ('delete', 'replace'):
3237 for c in arevs[alo:ahi]:
3237 for c in arevs[alo:ahi]:
3238 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3238 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3239 if tag in ('insert', 'replace'):
3239 if tag in ('insert', 'replace'):
3240 for c in brevs[blo:bhi]:
3240 for c in brevs[blo:bhi]:
3241 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3241 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3242 if tag == 'equal':
3242 if tag == 'equal':
3243 for c in arevs[alo:ahi]:
3243 for c in arevs[alo:ahi]:
3244 ui.write(b' %d\n' % c)
3244 ui.write(b' %d\n' % c)
3245 return 1
3245 return 1
3246
3246
3247 func = revset.makematcher(tree)
3247 func = revset.makematcher(tree)
3248 revs = func(repo)
3248 revs = func(repo)
3249 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3249 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3250 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3250 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3251 if not opts[b'show_revs']:
3251 if not opts[b'show_revs']:
3252 return
3252 return
3253 for c in revs:
3253 for c in revs:
3254 ui.write(b"%d\n" % c)
3254 ui.write(b"%d\n" % c)
3255
3255
3256
3256
3257 @command(
3257 @command(
3258 b'debugserve',
3258 b'debugserve',
3259 [
3259 [
3260 (
3260 (
3261 b'',
3261 b'',
3262 b'sshstdio',
3262 b'sshstdio',
3263 False,
3263 False,
3264 _(b'run an SSH server bound to process handles'),
3264 _(b'run an SSH server bound to process handles'),
3265 ),
3265 ),
3266 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3266 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3267 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3267 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3268 ],
3268 ],
3269 b'',
3269 b'',
3270 )
3270 )
3271 def debugserve(ui, repo, **opts):
3271 def debugserve(ui, repo, **opts):
3272 """run a server with advanced settings
3272 """run a server with advanced settings
3273
3273
3274 This command is similar to :hg:`serve`. It exists partially as a
3274 This command is similar to :hg:`serve`. It exists partially as a
3275 workaround to the fact that ``hg serve --stdio`` must have specific
3275 workaround to the fact that ``hg serve --stdio`` must have specific
3276 arguments for security reasons.
3276 arguments for security reasons.
3277 """
3277 """
3278 opts = pycompat.byteskwargs(opts)
3278 opts = pycompat.byteskwargs(opts)
3279
3279
3280 if not opts[b'sshstdio']:
3280 if not opts[b'sshstdio']:
3281 raise error.Abort(_(b'only --sshstdio is currently supported'))
3281 raise error.Abort(_(b'only --sshstdio is currently supported'))
3282
3282
3283 logfh = None
3283 logfh = None
3284
3284
3285 if opts[b'logiofd'] and opts[b'logiofile']:
3285 if opts[b'logiofd'] and opts[b'logiofile']:
3286 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3286 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3287
3287
3288 if opts[b'logiofd']:
3288 if opts[b'logiofd']:
3289 # Ideally we would be line buffered. But line buffering in binary
3289 # Ideally we would be line buffered. But line buffering in binary
3290 # mode isn't supported and emits a warning in Python 3.8+. Disabling
3290 # mode isn't supported and emits a warning in Python 3.8+. Disabling
3291 # buffering could have performance impacts. But since this isn't
3291 # buffering could have performance impacts. But since this isn't
3292 # performance critical code, it should be fine.
3292 # performance critical code, it should be fine.
3293 try:
3293 try:
3294 logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
3294 logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
3295 except OSError as e:
3295 except OSError as e:
3296 if e.errno != errno.ESPIPE:
3296 if e.errno != errno.ESPIPE:
3297 raise
3297 raise
3298 # can't seek a pipe, so `ab` mode fails on py3
3298 # can't seek a pipe, so `ab` mode fails on py3
3299 logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
3299 logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
3300 elif opts[b'logiofile']:
3300 elif opts[b'logiofile']:
3301 logfh = open(opts[b'logiofile'], b'ab', 0)
3301 logfh = open(opts[b'logiofile'], b'ab', 0)
3302
3302
3303 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3303 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3304 s.serve_forever()
3304 s.serve_forever()
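# A minimal usage sketch for debugserve above; the log file path is an
# arbitrary example:
#
#   $ hg debugserve --sshstdio --logiofile /tmp/hg-server-io.log
#
# starts an SSH protocol server bound to stdin/stdout and appends all
# server I/O to the given file, per the option handling above.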
3305
3305
3306
3306
3307 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3307 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3308 def debugsetparents(ui, repo, rev1, rev2=None):
3308 def debugsetparents(ui, repo, rev1, rev2=None):
3309 """manually set the parents of the current working directory
3309 """manually set the parents of the current working directory
3310
3310
3311 This is useful for writing repository conversion tools, but should
3311 This is useful for writing repository conversion tools, but should
3312 be used with care. For example, neither the working directory nor the
3312 be used with care. For example, neither the working directory nor the
3313 dirstate is updated, so file status may be incorrect after running this
3313 dirstate is updated, so file status may be incorrect after running this
3314 command.
3314 command.
3315
3315
3316 Returns 0 on success.
3316 Returns 0 on success.
3317 """
3317 """
3318
3318
3319 node1 = scmutil.revsingle(repo, rev1).node()
3319 node1 = scmutil.revsingle(repo, rev1).node()
3320 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3320 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3321
3321
3322 with repo.wlock():
3322 with repo.wlock():
3323 repo.setparents(node1, node2)
3323 repo.setparents(node1, node2)
3324
3324
3325
3325
3326 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3326 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3327 def debugsidedata(ui, repo, file_, rev=None, **opts):
3327 def debugsidedata(ui, repo, file_, rev=None, **opts):
3328 """dump the side data for a cl/manifest/file revision
3328 """dump the side data for a cl/manifest/file revision
3329
3329
3330 Use --verbose to dump the sidedata content."""
3330 Use --verbose to dump the sidedata content."""
3331 opts = pycompat.byteskwargs(opts)
3331 opts = pycompat.byteskwargs(opts)
3332 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3332 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3333 if rev is not None:
3333 if rev is not None:
3334 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3334 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3335 file_, rev = None, file_
3335 file_, rev = None, file_
3336 elif rev is None:
3336 elif rev is None:
3337 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3337 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3338 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
3338 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
3339 r = getattr(r, '_revlog', r)
3339 r = getattr(r, '_revlog', r)
3340 try:
3340 try:
3341 sidedata = r.sidedata(r.lookup(rev))
3341 sidedata = r.sidedata(r.lookup(rev))
3342 except KeyError:
3342 except KeyError:
3343 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3343 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3344 if sidedata:
3344 if sidedata:
3345 sidedata = list(sidedata.items())
3345 sidedata = list(sidedata.items())
3346 sidedata.sort()
3346 sidedata.sort()
3347 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3347 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3348 for key, value in sidedata:
3348 for key, value in sidedata:
3349 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3349 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3350 if ui.verbose:
3350 if ui.verbose:
3351 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3351 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
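# A minimal usage sketch for debugsidedata above, assuming a repository
# whose revlogs actually store sidedata (the entry numbers and sizes below
# are made up for illustration):
#
#   $ hg debugsidedata -c 0
#   2 sidedata entries
#    entry-0001 size 4
#    entry-0002 size 32
#
# Adding --verbose additionally pretty-prints each value.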
3352
3352
3353
3353
3354 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3354 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3355 def debugssl(ui, repo, source=None, **opts):
3355 def debugssl(ui, repo, source=None, **opts):
3356 '''test a secure connection to a server
3356 '''test a secure connection to a server
3357
3357
3358 This builds the certificate chain for the server on Windows, installing the
3358 This builds the certificate chain for the server on Windows, installing the
3359 missing intermediates and trusted root via Windows Update if necessary. It
3359 missing intermediates and trusted root via Windows Update if necessary. It
3360 does nothing on other platforms.
3360 does nothing on other platforms.
3361
3361
3362 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3362 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3363 that server is used. See :hg:`help urls` for more information.
3363 that server is used. See :hg:`help urls` for more information.
3364
3364
3365 If the update succeeds, retry the original operation. Otherwise, the cause
3365 If the update succeeds, retry the original operation. Otherwise, the cause
3366 of the SSL error is likely another issue.
3366 of the SSL error is likely another issue.
3367 '''
3367 '''
3368 if not pycompat.iswindows:
3368 if not pycompat.iswindows:
3369 raise error.Abort(
3369 raise error.Abort(
3370 _(b'certificate chain building is only possible on Windows')
3370 _(b'certificate chain building is only possible on Windows')
3371 )
3371 )
3372
3372
3373 if not source:
3373 if not source:
3374 if not repo:
3374 if not repo:
3375 raise error.Abort(
3375 raise error.Abort(
3376 _(
3376 _(
3377 b"there is no Mercurial repository here, and no "
3377 b"there is no Mercurial repository here, and no "
3378 b"server specified"
3378 b"server specified"
3379 )
3379 )
3380 )
3380 )
3381 source = b"default"
3381 source = b"default"
3382
3382
3383 source, branches = hg.parseurl(ui.expandpath(source))
3383 source, branches = hg.parseurl(ui.expandpath(source))
3384 url = util.url(source)
3384 url = util.url(source)
3385
3385
3386 defaultport = {b'https': 443, b'ssh': 22}
3386 defaultport = {b'https': 443, b'ssh': 22}
3387 if url.scheme in defaultport:
3387 if url.scheme in defaultport:
3388 try:
3388 try:
3389 addr = (url.host, int(url.port or defaultport[url.scheme]))
3389 addr = (url.host, int(url.port or defaultport[url.scheme]))
3390 except ValueError:
3390 except ValueError:
3391 raise error.Abort(_(b"malformed port number in URL"))
3391 raise error.Abort(_(b"malformed port number in URL"))
3392 else:
3392 else:
3393 raise error.Abort(_(b"only https and ssh connections are supported"))
3393 raise error.Abort(_(b"only https and ssh connections are supported"))
3394
3394
3395 from . import win32
3395 from . import win32
3396
3396
3397 s = ssl.wrap_socket(
3397 s = ssl.wrap_socket(
3398 socket.socket(),
3398 socket.socket(),
3399 ssl_version=ssl.PROTOCOL_TLS,
3399 ssl_version=ssl.PROTOCOL_TLS,
3400 cert_reqs=ssl.CERT_NONE,
3400 cert_reqs=ssl.CERT_NONE,
3401 ca_certs=None,
3401 ca_certs=None,
3402 )
3402 )
3403
3403
3404 try:
3404 try:
3405 s.connect(addr)
3405 s.connect(addr)
3406 cert = s.getpeercert(True)
3406 cert = s.getpeercert(True)
3407
3407
3408 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3408 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3409
3409
3410 complete = win32.checkcertificatechain(cert, build=False)
3410 complete = win32.checkcertificatechain(cert, build=False)
3411
3411
3412 if not complete:
3412 if not complete:
3413 ui.status(_(b'certificate chain is incomplete, updating... '))
3413 ui.status(_(b'certificate chain is incomplete, updating... '))
3414
3414
3415 if not win32.checkcertificatechain(cert):
3415 if not win32.checkcertificatechain(cert):
3416 ui.status(_(b'failed.\n'))
3416 ui.status(_(b'failed.\n'))
3417 else:
3417 else:
3418 ui.status(_(b'done.\n'))
3418 ui.status(_(b'done.\n'))
3419 else:
3419 else:
3420 ui.status(_(b'full certificate chain is available\n'))
3420 ui.status(_(b'full certificate chain is available\n'))
3421 finally:
3421 finally:
3422 s.close()
3422 s.close()
3423
3423
3424
3424
3425 @command(
3425 @command(
3426 b'debugsub',
3426 b'debugsub',
3427 [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
3427 [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
3428 _(b'[-r REV] [REV]'),
3428 _(b'[-r REV] [REV]'),
3429 )
3429 )
3430 def debugsub(ui, repo, rev=None):
3430 def debugsub(ui, repo, rev=None):
3431 ctx = scmutil.revsingle(repo, rev, None)
3431 ctx = scmutil.revsingle(repo, rev, None)
3432 for k, v in sorted(ctx.substate.items()):
3432 for k, v in sorted(ctx.substate.items()):
3433 ui.writenoi18n(b'path %s\n' % k)
3433 ui.writenoi18n(b'path %s\n' % k)
3434 ui.writenoi18n(b' source %s\n' % v[0])
3434 ui.writenoi18n(b' source %s\n' % v[0])
3435 ui.writenoi18n(b' revision %s\n' % v[1])
3435 ui.writenoi18n(b' revision %s\n' % v[1])
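# A minimal usage sketch for debugsub above, assuming a hypothetical
# repository with one subrepository named "lib":
#
#   $ hg debugsub -r tip
#   path lib
#    source https://example.com/lib
#    revision 0123456789abcdef0123456789abcdef01234567
#
# i.e. one path/source/revision triple per substate entry, as written by
# the loop above.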
3436
3436
3437
3437
3438 @command(
3438 @command(
3439 b'debugsuccessorssets',
3439 b'debugsuccessorssets',
3440 [(b'', b'closest', False, _(b'return closest successors sets only'))],
3440 [(b'', b'closest', False, _(b'return closest successors sets only'))],
3441 _(b'[REV]'),
3441 _(b'[REV]'),
3442 )
3442 )
3443 def debugsuccessorssets(ui, repo, *revs, **opts):
3443 def debugsuccessorssets(ui, repo, *revs, **opts):
3444 """show set of successors for revision
3444 """show set of successors for revision
3445
3445
3446 A successors set of changeset A is a consistent group of revisions that
3446 A successors set of changeset A is a consistent group of revisions that
3447 succeed A. It contains non-obsolete changesets only unless the closest
3447 succeed A. It contains non-obsolete changesets only unless the closest
3448 successors sets are requested.
3448 successors sets are requested.
3449
3449
3450 In most cases a changeset A has a single successors set containing a single
3450 In most cases a changeset A has a single successors set containing a single
3451 successor (changeset A replaced by A').
3451 successor (changeset A replaced by A').
3452
3452
3453 A changeset that is made obsolete with no successors is called "pruned".
3453 A changeset that is made obsolete with no successors is called "pruned".
3454 Such changesets have no successors sets at all.
3454 Such changesets have no successors sets at all.
3455
3455
3456 A changeset that has been "split" will have a successors set containing
3456 A changeset that has been "split" will have a successors set containing
3457 more than one successor.
3457 more than one successor.
3458
3458
3459 A changeset that has been rewritten in multiple different ways is called
3459 A changeset that has been rewritten in multiple different ways is called
3460 "divergent". Such changesets have multiple successor sets (each of which
3460 "divergent". Such changesets have multiple successor sets (each of which
3461 may also be split, i.e. have multiple successors).
3461 may also be split, i.e. have multiple successors).
3462
3462
3463 Results are displayed as follows::
3463 Results are displayed as follows::
3464
3464
3465 <rev1>
3465 <rev1>
3466 <successors-1A>
3466 <successors-1A>
3467 <rev2>
3467 <rev2>
3468 <successors-2A>
3468 <successors-2A>
3469 <successors-2B1> <successors-2B2> <successors-2B3>
3469 <successors-2B1> <successors-2B2> <successors-2B3>
3470
3470
3471 Here rev2 has two possible (i.e. divergent) successors sets. The first
3471 Here rev2 has two possible (i.e. divergent) successors sets. The first
3472 holds one element, whereas the second holds three (i.e. the changeset has
3472 holds one element, whereas the second holds three (i.e. the changeset has
3473 been split).
3473 been split).
3474 """
3474 """
3475 # passed to successorssets caching computation from one call to another
3475 # passed to successorssets caching computation from one call to another
3476 cache = {}
3476 cache = {}
3477 ctx2str = bytes
3477 ctx2str = bytes
3478 node2str = short
3478 node2str = short
3479 for rev in scmutil.revrange(repo, revs):
3479 for rev in scmutil.revrange(repo, revs):
3480 ctx = repo[rev]
3480 ctx = repo[rev]
3481 ui.write(b'%s\n' % ctx2str(ctx))
3481 ui.write(b'%s\n' % ctx2str(ctx))
3482 for succsset in obsutil.successorssets(
3482 for succsset in obsutil.successorssets(
3483 repo, ctx.node(), closest=opts['closest'], cache=cache
3483 repo, ctx.node(), closest=opts['closest'], cache=cache
3484 ):
3484 ):
3485 if succsset:
3485 if succsset:
3486 ui.write(b' ')
3486 ui.write(b' ')
3487 ui.write(node2str(succsset[0]))
3487 ui.write(node2str(succsset[0]))
3488 for node in succsset[1:]:
3488 for node in succsset[1:]:
3489 ui.write(b' ')
3489 ui.write(b' ')
3490 ui.write(node2str(node))
3490 ui.write(node2str(node))
3491 ui.write(b'\n')
3491 ui.write(b'\n')
3492
3492
3493
3493
3494 @command(b'debugtagscache', [])
3494 @command(b'debugtagscache', [])
3495 def debugtagscache(ui, repo):
3495 def debugtagscache(ui, repo):
3496 """display the contents of .hg/cache/hgtagsfnodes1"""
3496 """display the contents of .hg/cache/hgtagsfnodes1"""
3497 cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
3497 cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
3498 for r in repo:
3498 for r in repo:
3499 node = repo[r].node()
3499 node = repo[r].node()
3500 tagsnode = cache.getfnode(node, computemissing=False)
3500 tagsnode = cache.getfnode(node, computemissing=False)
3501 tagsnodedisplay = hex(tagsnode) if tagsnode else 'missing/invalid'
3501 tagsnodedisplay = hex(tagsnode) if tagsnode else b'missing/invalid'
3502 ui.write(b'%s %s %s\n' % (r, hex(node), tagsnodedisplay))
3502 ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay))
3503
3503
3504
3504
3505 @command(
3505 @command(
3506 b'debugtemplate',
3506 b'debugtemplate',
3507 [
3507 [
3508 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
3508 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
3509 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
3509 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
3510 ],
3510 ],
3511 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
3511 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
3512 optionalrepo=True,
3512 optionalrepo=True,
3513 )
3513 )
3514 def debugtemplate(ui, repo, tmpl, **opts):
3514 def debugtemplate(ui, repo, tmpl, **opts):
3515 """parse and apply a template
3515 """parse and apply a template
3516
3516
3517 If -r/--rev is given, the template is processed as a log template and
3517 If -r/--rev is given, the template is processed as a log template and
3518 applied to the given changesets. Otherwise, it is processed as a generic
3518 applied to the given changesets. Otherwise, it is processed as a generic
3519 template.
3519 template.
3520
3520
3521 Use --verbose to print the parsed tree.
3521 Use --verbose to print the parsed tree.
3522 """
3522 """
3523 revs = None
3523 revs = None
3524 if opts['rev']:
3524 if opts['rev']:
3525 if repo is None:
3525 if repo is None:
3526 raise error.RepoError(
3526 raise error.RepoError(
3527 _(b'there is no Mercurial repository here (.hg not found)')
3527 _(b'there is no Mercurial repository here (.hg not found)')
3528 )
3528 )
3529 revs = scmutil.revrange(repo, opts['rev'])
3529 revs = scmutil.revrange(repo, opts['rev'])
3530
3530
3531 props = {}
3531 props = {}
3532 for d in opts['define']:
3532 for d in opts['define']:
3533 try:
3533 try:
3534 k, v = (e.strip() for e in d.split(b'=', 1))
3534 k, v = (e.strip() for e in d.split(b'=', 1))
3535 if not k or k == b'ui':
3535 if not k or k == b'ui':
3536 raise ValueError
3536 raise ValueError
3537 props[k] = v
3537 props[k] = v
3538 except ValueError:
3538 except ValueError:
3539 raise error.Abort(_(b'malformed keyword definition: %s') % d)
3539 raise error.Abort(_(b'malformed keyword definition: %s') % d)
3540
3540
3541 if ui.verbose:
3541 if ui.verbose:
3542 aliases = ui.configitems(b'templatealias')
3542 aliases = ui.configitems(b'templatealias')
3543 tree = templater.parse(tmpl)
3543 tree = templater.parse(tmpl)
3544 ui.note(templater.prettyformat(tree), b'\n')
3544 ui.note(templater.prettyformat(tree), b'\n')
3545 newtree = templater.expandaliases(tree, aliases)
3545 newtree = templater.expandaliases(tree, aliases)
3546 if newtree != tree:
3546 if newtree != tree:
3547 ui.notenoi18n(
3547 ui.notenoi18n(
3548 b"* expanded:\n", templater.prettyformat(newtree), b'\n'
3548 b"* expanded:\n", templater.prettyformat(newtree), b'\n'
3549 )
3549 )
3550
3550
3551 if revs is None:
3551 if revs is None:
3552 tres = formatter.templateresources(ui, repo)
3552 tres = formatter.templateresources(ui, repo)
3553 t = formatter.maketemplater(ui, tmpl, resources=tres)
3553 t = formatter.maketemplater(ui, tmpl, resources=tres)
3554 if ui.verbose:
3554 if ui.verbose:
3555 kwds, funcs = t.symbolsuseddefault()
3555 kwds, funcs = t.symbolsuseddefault()
3556 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3556 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3557 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3557 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3558 ui.write(t.renderdefault(props))
3558 ui.write(t.renderdefault(props))
3559 else:
3559 else:
3560 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
3560 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
3561 if ui.verbose:
3561 if ui.verbose:
3562 kwds, funcs = displayer.t.symbolsuseddefault()
3562 kwds, funcs = displayer.t.symbolsuseddefault()
3563 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3563 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3564 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3564 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3565 for r in revs:
3565 for r in revs:
3566 displayer.show(repo[r], **pycompat.strkwargs(props))
3566 displayer.show(repo[r], **pycompat.strkwargs(props))
3567 displayer.close()
3567 displayer.close()
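# A minimal usage sketch for debugtemplate above; the keyword name "who"
# is an arbitrary example:
#
#   $ hg debugtemplate -r . -D who=world '{rev}:{node|short} hello {who}\n'
#
# With -r the template is rendered as a log template against the selected
# changesets, and each -D KEY=VALUE pair is exposed as an extra template
# property.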
3568
3568
3569
3569
3570 @command(
3570 @command(
3571 b'debuguigetpass',
3571 b'debuguigetpass',
3572 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3572 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3573 _(b'[-p TEXT]'),
3573 _(b'[-p TEXT]'),
3574 norepo=True,
3574 norepo=True,
3575 )
3575 )
3576 def debuguigetpass(ui, prompt=b''):
3576 def debuguigetpass(ui, prompt=b''):
3577 """show prompt to type password"""
3577 """show prompt to type password"""
3578 r = ui.getpass(prompt)
3578 r = ui.getpass(prompt)
3579 ui.writenoi18n(b'response: %s\n' % r)
3579 ui.writenoi18n(b'response: %s\n' % r)
3580
3580
3581
3581
3582 @command(
3582 @command(
3583 b'debuguiprompt',
3583 b'debuguiprompt',
3584 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3584 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3585 _(b'[-p TEXT]'),
3585 _(b'[-p TEXT]'),
3586 norepo=True,
3586 norepo=True,
3587 )
3587 )
3588 def debuguiprompt(ui, prompt=b''):
3588 def debuguiprompt(ui, prompt=b''):
3589 """show plain prompt"""
3589 """show plain prompt"""
3590 r = ui.prompt(prompt)
3590 r = ui.prompt(prompt)
3591 ui.writenoi18n(b'response: %s\n' % r)
3591 ui.writenoi18n(b'response: %s\n' % r)
3592
3592
3593
3593
3594 @command(b'debugupdatecaches', [])
3594 @command(b'debugupdatecaches', [])
3595 def debugupdatecaches(ui, repo, *pats, **opts):
3595 def debugupdatecaches(ui, repo, *pats, **opts):
3596 """warm all known caches in the repository"""
3596 """warm all known caches in the repository"""
3597 with repo.wlock(), repo.lock():
3597 with repo.wlock(), repo.lock():
3598 repo.updatecaches(full=True)
3598 repo.updatecaches(full=True)
3599
3599
3600
3600
3601 @command(
3601 @command(
3602 b'debugupgraderepo',
3602 b'debugupgraderepo',
3603 [
3603 [
3604 (
3604 (
3605 b'o',
3605 b'o',
3606 b'optimize',
3606 b'optimize',
3607 [],
3607 [],
3608 _(b'extra optimization to perform'),
3608 _(b'extra optimization to perform'),
3609 _(b'NAME'),
3609 _(b'NAME'),
3610 ),
3610 ),
3611 (b'', b'run', False, _(b'performs an upgrade')),
3611 (b'', b'run', False, _(b'performs an upgrade')),
3612 (b'', b'backup', True, _(b'keep the old repository content around')),
3612 (b'', b'backup', True, _(b'keep the old repository content around')),
3613 (b'', b'changelog', None, _(b'select the changelog for upgrade')),
3613 (b'', b'changelog', None, _(b'select the changelog for upgrade')),
3614 (b'', b'manifest', None, _(b'select the manifest for upgrade')),
3614 (b'', b'manifest', None, _(b'select the manifest for upgrade')),
3615 ],
3615 ],
3616 )
3616 )
3617 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
3617 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
3618 """upgrade a repository to use different features
3618 """upgrade a repository to use different features
3619
3619
3620 If no arguments are specified, the repository is evaluated for upgrade
3620 If no arguments are specified, the repository is evaluated for upgrade
3621 and a list of problems and potential optimizations is printed.
3621 and a list of problems and potential optimizations is printed.
3622
3622
3623 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
3623 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
3624 can be influenced via additional arguments. More details will be provided
3624 can be influenced via additional arguments. More details will be provided
3625 by the command output when run without ``--run``.
3625 by the command output when run without ``--run``.
3626
3626
3627 During the upgrade, the repository will be locked and no writes will be
3627 During the upgrade, the repository will be locked and no writes will be
3628 allowed.
3628 allowed.
3629
3629
3630 At the end of the upgrade, the repository may not be readable while new
3630 At the end of the upgrade, the repository may not be readable while new
3631 repository data is swapped in. This window will be as long as it takes to
3631 repository data is swapped in. This window will be as long as it takes to
3632 rename some directories inside the ``.hg`` directory. On most machines, this
3632 rename some directories inside the ``.hg`` directory. On most machines, this
3633 should complete almost instantaneously and the chances of a consumer being
3633 should complete almost instantaneously and the chances of a consumer being
3634 unable to access the repository should be low.
3634 unable to access the repository should be low.
3635
3635
3636 By default, all revlogs will be upgraded. You can restrict this using flags
3636 By default, all revlogs will be upgraded. You can restrict this using flags
3637 such as `--manifest`:
3637 such as `--manifest`:
3638
3638
3639 * `--manifest`: only optimize the manifest
3639 * `--manifest`: only optimize the manifest
3640 * `--no-manifest`: optimize all revlogs but the manifest
3640 * `--no-manifest`: optimize all revlogs but the manifest
3641 * `--changelog`: optimize the changelog only
3641 * `--changelog`: optimize the changelog only
3642 * `--no-changelog --no-manifest`: optimize filelogs only
3642 * `--no-changelog --no-manifest`: optimize filelogs only
3643 """
3643 """
3644 return upgrade.upgraderepo(
3644 return upgrade.upgraderepo(
3645 ui, repo, run=run, optimize=optimize, backup=backup, **opts
3645 ui, repo, run=run, optimize=optimize, backup=backup, **opts
3646 )
3646 )
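# A minimal usage sketch for debugupgraderepo above:
#
#   $ hg debugupgraderepo
#
# only reports problems and possible optimizations, while
#
#   $ hg debugupgraderepo --run --no-manifest
#
# performs the upgrade on every revlog except the manifest, as described
# in the docstring.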
3647
3647
3648
3648
3649 @command(
3649 @command(
3650 b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
3650 b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
3651 )
3651 )
3652 def debugwalk(ui, repo, *pats, **opts):
3652 def debugwalk(ui, repo, *pats, **opts):
3653 """show how files match on given patterns"""
3653 """show how files match on given patterns"""
3654 opts = pycompat.byteskwargs(opts)
3654 opts = pycompat.byteskwargs(opts)
3655 m = scmutil.match(repo[None], pats, opts)
3655 m = scmutil.match(repo[None], pats, opts)
3656 if ui.verbose:
3656 if ui.verbose:
3657 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
3657 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
3658 items = list(repo[None].walk(m))
3658 items = list(repo[None].walk(m))
3659 if not items:
3659 if not items:
3660 return
3660 return
3661 f = lambda fn: fn
3661 f = lambda fn: fn
3662 if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
3662 if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
3663 f = lambda fn: util.normpath(fn)
3663 f = lambda fn: util.normpath(fn)
3664 fmt = b'f %%-%ds %%-%ds %%s' % (
3664 fmt = b'f %%-%ds %%-%ds %%s' % (
3665 max([len(abs) for abs in items]),
3665 max([len(abs) for abs in items]),
3666 max([len(repo.pathto(abs)) for abs in items]),
3666 max([len(repo.pathto(abs)) for abs in items]),
3667 )
3667 )
3668 for abs in items:
3668 for abs in items:
3669 line = fmt % (
3669 line = fmt % (
3670 abs,
3670 abs,
3671 f(repo.pathto(abs)),
3671 f(repo.pathto(abs)),
3672 m.exact(abs) and b'exact' or b'',
3672 m.exact(abs) and b'exact' or b'',
3673 )
3673 )
3674 ui.write(b"%s\n" % line.rstrip())
3674 ui.write(b"%s\n" % line.rstrip())
3675
3675
3676
3676
3677 @command(b'debugwhyunstable', [], _(b'REV'))
3677 @command(b'debugwhyunstable', [], _(b'REV'))
3678 def debugwhyunstable(ui, repo, rev):
3678 def debugwhyunstable(ui, repo, rev):
3679 """explain instabilities of a changeset"""
3679 """explain instabilities of a changeset"""
3680 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
3680 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
3681 dnodes = b''
3681 dnodes = b''
3682 if entry.get(b'divergentnodes'):
3682 if entry.get(b'divergentnodes'):
3683 dnodes = (
3683 dnodes = (
3684 b' '.join(
3684 b' '.join(
3685 b'%s (%s)' % (ctx.hex(), ctx.phasestr())
3685 b'%s (%s)' % (ctx.hex(), ctx.phasestr())
3686 for ctx in entry[b'divergentnodes']
3686 for ctx in entry[b'divergentnodes']
3687 )
3687 )
3688 + b' '
3688 + b' '
3689 )
3689 )
3690 ui.write(
3690 ui.write(
3691 b'%s: %s%s %s\n'
3691 b'%s: %s%s %s\n'
3692 % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
3692 % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
3693 )
3693 )
3694
3694
3695
3695
3696 @command(
3696 @command(
3697 b'debugwireargs',
3697 b'debugwireargs',
3698 [
3698 [
3699 (b'', b'three', b'', b'three'),
3699 (b'', b'three', b'', b'three'),
3700 (b'', b'four', b'', b'four'),
3700 (b'', b'four', b'', b'four'),
3701 (b'', b'five', b'', b'five'),
3701 (b'', b'five', b'', b'five'),
3702 ]
3702 ]
3703 + cmdutil.remoteopts,
3703 + cmdutil.remoteopts,
3704 _(b'REPO [OPTIONS]... [ONE [TWO]]'),
3704 _(b'REPO [OPTIONS]... [ONE [TWO]]'),
3705 norepo=True,
3705 norepo=True,
3706 )
3706 )
3707 def debugwireargs(ui, repopath, *vals, **opts):
3707 def debugwireargs(ui, repopath, *vals, **opts):
3708 opts = pycompat.byteskwargs(opts)
3708 opts = pycompat.byteskwargs(opts)
3709 repo = hg.peer(ui, opts, repopath)
3709 repo = hg.peer(ui, opts, repopath)
3710 for opt in cmdutil.remoteopts:
3710 for opt in cmdutil.remoteopts:
3711 del opts[opt[1]]
3711 del opts[opt[1]]
3712 args = {}
3712 args = {}
3713 for k, v in pycompat.iteritems(opts):
3713 for k, v in pycompat.iteritems(opts):
3714 if v:
3714 if v:
3715 args[k] = v
3715 args[k] = v
3716 args = pycompat.strkwargs(args)
3716 args = pycompat.strkwargs(args)
3717 # run twice to check that we don't mess up the stream for the next command
3717 # run twice to check that we don't mess up the stream for the next command
3718 res1 = repo.debugwireargs(*vals, **args)
3718 res1 = repo.debugwireargs(*vals, **args)
3719 res2 = repo.debugwireargs(*vals, **args)
3719 res2 = repo.debugwireargs(*vals, **args)
3720 ui.write(b"%s\n" % res1)
3720 ui.write(b"%s\n" % res1)
3721 if res1 != res2:
3721 if res1 != res2:
3722 ui.warn(b"%s\n" % res2)
3722 ui.warn(b"%s\n" % res2)
3723
3723
3724
3724
3725 def _parsewirelangblocks(fh):
3725 def _parsewirelangblocks(fh):
3726 activeaction = None
3726 activeaction = None
3727 blocklines = []
3727 blocklines = []
3728 lastindent = 0
3728 lastindent = 0
3729
3729
3730 for line in fh:
3730 for line in fh:
3731 line = line.rstrip()
3731 line = line.rstrip()
3732 if not line:
3732 if not line:
3733 continue
3733 continue
3734
3734
3735 if line.startswith(b'#'):
3735 if line.startswith(b'#'):
3736 continue
3736 continue
3737
3737
3738 if not line.startswith(b' '):
3738 if not line.startswith(b' '):
3739 # New block. Flush previous one.
3739 # New block. Flush previous one.
3740 if activeaction:
3740 if activeaction:
3741 yield activeaction, blocklines
3741 yield activeaction, blocklines
3742
3742
3743 activeaction = line
3743 activeaction = line
3744 blocklines = []
3744 blocklines = []
3745 lastindent = 0
3745 lastindent = 0
3746 continue
3746 continue
3747
3747
3748 # Else we start with an indent.
3748 # Else we start with an indent.
3749
3749
3750 if not activeaction:
3750 if not activeaction:
3751 raise error.Abort(_(b'indented line outside of block'))
3751 raise error.Abort(_(b'indented line outside of block'))
3752
3752
3753 indent = len(line) - len(line.lstrip())
3753 indent = len(line) - len(line.lstrip())
3754
3754
3755 # If this line is indented more than the last line, concatenate it.
3755 # If this line is indented more than the last line, concatenate it.
3756 if indent > lastindent and blocklines:
3756 if indent > lastindent and blocklines:
3757 blocklines[-1] += line.lstrip()
3757 blocklines[-1] += line.lstrip()
3758 else:
3758 else:
3759 blocklines.append(line)
3759 blocklines.append(line)
3760 lastindent = indent
3760 lastindent = indent
3761
3761
3762 # Flush last block.
3762 # Flush last block.
3763 if activeaction:
3763 if activeaction:
3764 yield activeaction, blocklines
3764 yield activeaction, blocklines
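# A minimal sketch of the input _parsewirelangblocks above consumes,
# reusing the listkeys example from the debugwireproto docstring below.
# Input such as:
#
#   command listkeys
#       namespace bookmarks
#
# is yielded roughly as (b'command listkeys', [b'    namespace bookmarks']):
# the unindented line names the action, the indented lines form its
# payload, and deeper-indented continuation lines are concatenated onto
# the previous payload line.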
3765
3765
3766
3766
3767 @command(
3767 @command(
3768 b'debugwireproto',
3768 b'debugwireproto',
3769 [
3769 [
3770 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
3770 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
3771 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
3771 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
3772 (
3772 (
3773 b'',
3773 b'',
3774 b'noreadstderr',
3774 b'noreadstderr',
3775 False,
3775 False,
3776 _(b'do not read from stderr of the remote'),
3776 _(b'do not read from stderr of the remote'),
3777 ),
3777 ),
3778 (
3778 (
3779 b'',
3779 b'',
3780 b'nologhandshake',
3780 b'nologhandshake',
3781 False,
3781 False,
3782 _(b'do not log I/O related to the peer handshake'),
3782 _(b'do not log I/O related to the peer handshake'),
3783 ),
3783 ),
3784 ]
3784 ]
3785 + cmdutil.remoteopts,
3785 + cmdutil.remoteopts,
3786 _(b'[PATH]'),
3786 _(b'[PATH]'),
3787 optionalrepo=True,
3787 optionalrepo=True,
3788 )
3788 )
3789 def debugwireproto(ui, repo, path=None, **opts):
3789 def debugwireproto(ui, repo, path=None, **opts):
3790 """send wire protocol commands to a server
3790 """send wire protocol commands to a server
3791
3791
3792 This command can be used to issue wire protocol commands to remote
3792 This command can be used to issue wire protocol commands to remote
3793 peers and to debug the raw data being exchanged.
3793 peers and to debug the raw data being exchanged.
3794
3794
3795 ``--localssh`` will start an SSH server against the current repository
3795 ``--localssh`` will start an SSH server against the current repository
3796 and connect to that. By default, the connection will perform a handshake
3796 and connect to that. By default, the connection will perform a handshake
3797 and establish an appropriate peer instance.
3797 and establish an appropriate peer instance.
3798
3798
3799 ``--peer`` can be used to bypass the handshake protocol and construct a
3799 ``--peer`` can be used to bypass the handshake protocol and construct a
3800 peer instance using the specified class type. Valid values are ``raw``,
3800 peer instance using the specified class type. Valid values are ``raw``,
3801 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
3801 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
3802 raw data payloads and don't support higher-level command actions.
3802 raw data payloads and don't support higher-level command actions.
3803
3803
3804 ``--noreadstderr`` can be used to disable automatic reading from stderr
3804 ``--noreadstderr`` can be used to disable automatic reading from stderr
3805 of the peer (for SSH connections only). Disabling automatic reading of
3805 of the peer (for SSH connections only). Disabling automatic reading of
3806 stderr is useful for making output more deterministic.
3806 stderr is useful for making output more deterministic.
3807
3807
3808 Commands are issued via a mini language which is specified via stdin.
3808 Commands are issued via a mini language which is specified via stdin.
3809 The language consists of individual actions to perform. An action is
3809 The language consists of individual actions to perform. An action is
3810 defined by a block. A block is defined as a line with no leading
3810 defined by a block. A block is defined as a line with no leading
3811 space followed by 0 or more lines with leading space. Blocks are
3811 space followed by 0 or more lines with leading space. Blocks are
3812 effectively a high-level command with additional metadata.
3812 effectively a high-level command with additional metadata.
3813
3813
3814 Lines beginning with ``#`` are ignored.
3814 Lines beginning with ``#`` are ignored.
3815
3815
3816 The following sections denote available actions.
3816 The following sections denote available actions.
3817
3817
3818 raw
3818 raw
3819 ---
3819 ---
3820
3820
3821 Send raw data to the server.
3821 Send raw data to the server.
3822
3822
3823 The block payload contains the raw data to send as one atomic send
3823 The block payload contains the raw data to send as one atomic send
3824 operation. The data may not actually be delivered in a single system
3824 operation. The data may not actually be delivered in a single system
3825 call: it depends on the abilities of the transport being used.
3825 call: it depends on the abilities of the transport being used.
3826
3826
3827 Each line in the block is de-indented and concatenated. Then, that
3827 Each line in the block is de-indented and concatenated. Then, that
3828 value is evaluated as a Python b'' literal. This allows the use of
3828 value is evaluated as a Python b'' literal. This allows the use of
3829 backslash escaping, etc.
3829 backslash escaping, etc.
3830
3830
3831 raw+
3831 raw+
3832 ----
3832 ----
3833
3833
3834 Behaves like ``raw`` except flushes output afterwards.
3834 Behaves like ``raw`` except flushes output afterwards.
3835
3835
3836 command <X>
3836 command <X>
3837 -----------
3837 -----------
3838
3838
3839 Send a request to run a named command, whose name follows the ``command``
3839 Send a request to run a named command, whose name follows the ``command``
3840 string.
3840 string.
3841
3841
3842 Arguments to the command are defined as lines in this block. The format of
3842 Arguments to the command are defined as lines in this block. The format of
3843 each line is ``<key> <value>``. e.g.::
3843 each line is ``<key> <value>``. e.g.::
3844
3844
3845 command listkeys
3845 command listkeys
3846 namespace bookmarks
3846 namespace bookmarks
3847
3847
3848 If the value begins with ``eval:``, it will be interpreted as a Python
3848 If the value begins with ``eval:``, it will be interpreted as a Python
3849 literal expression. Otherwise values are interpreted as Python b'' literals.
3849 literal expression. Otherwise values are interpreted as Python b'' literals.
3850 This allows sending complex types and encoding special byte sequences via
3850 This allows sending complex types and encoding special byte sequences via
3851 backslash escaping.
3851 backslash escaping.
3852
3852
3853 The following arguments have special meaning:
3853 The following arguments have special meaning:
3854
3854
3855 ``PUSHFILE``
3855 ``PUSHFILE``
3856 When defined, the *push* mechanism of the peer will be used instead
3856 When defined, the *push* mechanism of the peer will be used instead
3857 of the static request-response mechanism and the content of the
3857 of the static request-response mechanism and the content of the
3858 file specified in the value of this argument will be sent as the
3858 file specified in the value of this argument will be sent as the
3859 command payload.
3859 command payload.
3860
3860
3861 This can be used to submit a local bundle file to the remote.
3861 This can be used to submit a local bundle file to the remote.
3862
3862
3863 batchbegin
3863 batchbegin
3864 ----------
3864 ----------
3865
3865
3866 Instruct the peer to begin a batched send.
3866 Instruct the peer to begin a batched send.
3867
3867
3868 All ``command`` blocks are queued for execution until the next
3868 All ``command`` blocks are queued for execution until the next
3869 ``batchsubmit`` block.
3869 ``batchsubmit`` block.
3870
3870
3871 batchsubmit
3871 batchsubmit
3872 -----------
3872 -----------
3873
3873
3874 Submit previously queued ``command`` blocks as a batch request.
3874 Submit previously queued ``command`` blocks as a batch request.
3875
3875
3876 This action MUST be paired with a ``batchbegin`` action.
3876 This action MUST be paired with a ``batchbegin`` action.

    httprequest <method> <path>
    ---------------------------

    (HTTP peer only)

    Send an HTTP request to the peer.

    The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.

    Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
    headers to add to the request. e.g. ``Accept: foo``.

    The following arguments are special:

    ``BODYFILE``
        The content of the file defined as the value to this argument will be
        transferred verbatim as the HTTP request body.

    ``frame <type> <flags> <payload>``
        Send a unified protocol frame as part of the request body.

        All frames will be collected and sent as the body to the HTTP
        request.
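
    For example, a hypothetical request (the path and headers are
    illustrative only)::

        httprequest GET api/
            user-agent: test
            accept: application/mercurial-cbor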

    close
    -----

    Close the connection to the server.

    flush
    -----

    Flush data written to the server.

    readavailable
    -------------

    Close the write end of the connection and read all available data from
    the server.

    If the connection to the server encompasses multiple pipes, we poll both
    pipes and read available data.

    readline
    --------

    Read a line of output from the server. If there are multiple output
    pipes, reads only the main pipe.

    ereadline
    ---------

    Like ``readline``, but read from the stderr pipe, if available.

    read <X>
    --------

    ``read()`` N bytes from the server's main output pipe.

    eread <X>
    ---------

    ``read()`` N bytes from the server's stderr pipe, if available.

    Specifying Unified Frame-Based Protocol Frames
    ----------------------------------------------

    It is possible to emit *Unified Frame-Based Protocol* frames by using
    special syntax.

    A frame is composed of a type, flags, and a payload. These can be parsed
    from a string of the form::

        <request-id> <stream-id> <stream-flags> <type> <flags> <payload>

    ``request-id`` and ``stream-id`` are integers defining the request and
    stream identifiers.

    ``type`` can be an integer value for the frame type or the string name
    of the type. The strings are defined in ``wireprotoframing.py``. e.g.
    ``command-name``.

    ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
    components. Each component (and there can be just one) can be an integer
    or a flag name for stream flags or frame flags, respectively. Values are
    resolved to integers and then bitwise OR'd together.

    ``payload`` represents the raw frame payload. If it begins with
    ``cbor:``, the following string is evaluated as Python code and the
    resulting object is fed into a CBOR encoder. Otherwise it is interpreted
    as a Python byte string literal.
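
    For example, a hypothetical frame spec naming the ``heads`` command
    (the request/stream identifiers and payload are shown purely for
    illustration)::

        1 1 stream-begin command-request new cbor:{b'name': b'heads'}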
    """
    opts = pycompat.byteskwargs(opts)

    if opts[b'localssh'] and not repo:
        raise error.Abort(_(b'--localssh requires a repository'))

    if opts[b'peer'] and opts[b'peer'] not in (
        b'raw',
        b'http2',
        b'ssh1',
        b'ssh2',
    ):
        raise error.Abort(
            _(b'invalid value for --peer'),
            hint=_(b'valid values are "raw", "http2", "ssh1", and "ssh2"'),
        )

    if path and opts[b'localssh']:
        raise error.Abort(_(b'cannot specify --localssh with an explicit path'))

    if ui.interactive():
        ui.write(_(b'(waiting for commands on stdin)\n'))

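    # Each parsed block is an (action, lines) pair: the action string
    # (e.g. b'command listkeys') and the indented argument lines following it.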
    blocks = list(_parsewirelangblocks(ui.fin))

    proc = None
    stdin = None
    stdout = None
    stderr = None
    opener = None

    if opts[b'localssh']:
        # We start the SSH server in its own process so there is process
        # separation. This prevents a whole class of potential bugs around
        # shared state from interfering with server operation.
        args = procutil.hgcmd() + [
            b'-R',
            repo.root,
            b'debugserve',
            b'--sshstdio',
        ]
        proc = subprocess.Popen(
            pycompat.rapply(procutil.tonativestr, args),
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            bufsize=0,
        )

        stdin = proc.stdin
        stdout = proc.stdout
        stderr = proc.stderr

        # We turn the pipes into observers so we can log I/O.
        if ui.verbose or opts[b'peer'] == b'raw':
            stdin = util.makeloggingfileobject(
                ui, proc.stdin, b'i', logdata=True
            )
            stdout = util.makeloggingfileobject(
                ui, proc.stdout, b'o', logdata=True
            )
            stderr = util.makeloggingfileobject(
                ui, proc.stderr, b'e', logdata=True
            )

        # --localssh also implies the peer connection settings.

        url = b'ssh://localserver'
        autoreadstderr = not opts[b'noreadstderr']

        if opts[b'peer'] == b'ssh1':
            ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
            peer = sshpeer.sshv1peer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                None,
                autoreadstderr=autoreadstderr,
            )
        elif opts[b'peer'] == b'ssh2':
            ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
            peer = sshpeer.sshv2peer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                None,
                autoreadstderr=autoreadstderr,
            )
        elif opts[b'peer'] == b'raw':
            ui.write(_(b'using raw connection to peer\n'))
            peer = None
        else:
            ui.write(_(b'creating ssh peer from handshake results\n'))
            peer = sshpeer.makepeer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                autoreadstderr=autoreadstderr,
            )

    elif path:
        # We bypass hg.peer() so we can proxy the sockets.
        # TODO consider not doing this because we skip
        # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
        u = util.url(path)
        if u.scheme != b'http':
            raise error.Abort(_(b'only http:// paths are currently supported'))

        url, authinfo = u.authinfo()
        openerargs = {
            'useragent': b'Mercurial debugwireproto',
        }

        # Turn pipes/sockets into observers so we can log I/O.
        if ui.verbose:
            openerargs.update(
                {
                    'loggingfh': ui,
                    'loggingname': b's',
                    'loggingopts': {'logdata': True, 'logdataapis': False,},
                }
            )

        if ui.debugflag:
            openerargs['loggingopts']['logdataapis'] = True

        # Don't send default headers when in raw mode. This allows us to
        # bypass most of the behavior of our URL handling code so we can
        # have near complete control over what's sent on the wire.
        if opts[b'peer'] == b'raw':
            openerargs['sendaccept'] = False

        opener = urlmod.opener(ui, authinfo, **openerargs)

        if opts[b'peer'] == b'http2':
            ui.write(_(b'creating http peer for wire protocol version 2\n'))
            # We go through makepeer() because we need an API descriptor for
            # the peer instance to be useful.
            with ui.configoverride(
                {(b'experimental', b'httppeer.advertise-v2'): True}
            ):
                if opts[b'nologhandshake']:
                    ui.pushbuffer()

                peer = httppeer.makepeer(ui, path, opener=opener)

                if opts[b'nologhandshake']:
                    ui.popbuffer()

            if not isinstance(peer, httppeer.httpv2peer):
                raise error.Abort(
                    _(
                        b'could not instantiate HTTP peer for '
                        b'wire protocol version 2'
                    ),
                    hint=_(
                        b'the server may not have the feature '
                        b'enabled or is not allowing this '
                        b'client version'
                    ),
                )

        elif opts[b'peer'] == b'raw':
            ui.write(_(b'using raw connection to peer\n'))
            peer = None
        elif opts[b'peer']:
            raise error.Abort(
                _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
            )
        else:
            peer = httppeer.makepeer(ui, path, opener=opener)

        # We /could/ populate stdin/stdout with sock.makefile()...
    else:
        raise error.Abort(_(b'unsupported connection configuration'))

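    # Holds (command, args) tuples queued between ``batchbegin`` and
    # ``batchsubmit``. None means we are not currently batching.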
    batchedcommands = None

    # Now perform actions based on the parsed wire language instructions.
    for action, lines in blocks:
        if action in (b'raw', b'raw+'):
            if not stdin:
                raise error.Abort(_(b'cannot call raw/raw+ on this peer'))

            # Concatenate the data together.
            data = b''.join(l.lstrip() for l in lines)
            data = stringutil.unescapestr(data)
            stdin.write(data)

            if action == b'raw+':
                stdin.flush()
        elif action == b'flush':
            if not stdin:
                raise error.Abort(_(b'cannot call flush on this peer'))
            stdin.flush()
        elif action.startswith(b'command'):
            if not peer:
                raise error.Abort(
                    _(
                        b'cannot send commands unless peer instance '
                        b'is available'
                    )
                )

            command = action.split(b' ', 1)[1]

            args = {}
            for line in lines:
                # We need to allow empty values.
                fields = line.lstrip().split(b' ', 1)
                if len(fields) == 1:
                    key = fields[0]
                    value = b''
                else:
                    key, value = fields

                if value.startswith(b'eval:'):
                    value = stringutil.evalpythonliteral(value[5:])
                else:
                    value = stringutil.unescapestr(value)

                args[key] = value

            if batchedcommands is not None:
                batchedcommands.append((command, args))
                continue

            ui.status(_(b'sending %s command\n') % command)

            if b'PUSHFILE' in args:
                with open(args[b'PUSHFILE'], 'rb') as fh:
                    del args[b'PUSHFILE']
                    res, output = peer._callpush(
                        command, fh, **pycompat.strkwargs(args)
                    )
                    ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
                    ui.status(
                        _(b'remote output: %s\n') % stringutil.escapestr(output)
                    )
            else:
                with peer.commandexecutor() as e:
                    res = e.callcommand(command, args).result()

                if isinstance(res, wireprotov2peer.commandresponse):
                    val = res.objects()
                    ui.status(
                        _(b'response: %s\n')
                        % stringutil.pprint(val, bprefix=True, indent=2)
                    )
                else:
                    ui.status(
                        _(b'response: %s\n')
                        % stringutil.pprint(res, bprefix=True, indent=2)
                    )

        elif action == b'batchbegin':
            if batchedcommands is not None:
                raise error.Abort(_(b'nested batchbegin not allowed'))

            batchedcommands = []
        elif action == b'batchsubmit':
            # There is a batching API we could go through. But it would be
            # difficult to normalize requests into function calls. It is easier
            # to bypass this layer and normalize to commands + args.
            ui.status(
                _(b'sending batch with %d sub-commands\n')
                % len(batchedcommands)
            )
            assert peer is not None
            for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
                ui.status(
                    _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
                )

            batchedcommands = None

        elif action.startswith(b'httprequest '):
            if not opener:
                raise error.Abort(
                    _(b'cannot use httprequest without an HTTP peer')
                )

            request = action.split(b' ', 2)
            if len(request) != 3:
                raise error.Abort(
                    _(
                        b'invalid httprequest: expected format is '
                        b'"httprequest <method> <path>"'
                    )
                )

            method, httppath = request[1:]
            headers = {}
            body = None
            frames = []
            for line in lines:
                line = line.lstrip()
                m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
                if m:
                    # Headers need to use native strings.
                    key = pycompat.strurl(m.group(1))
                    value = pycompat.strurl(m.group(2))
                    headers[key] = value
                    continue

                if line.startswith(b'BODYFILE '):
                    with open(line.split(b' ', 1)[1], 'rb') as fh:
                        body = fh.read()
                elif line.startswith(b'frame '):
                    frame = wireprotoframing.makeframefromhumanstring(
                        line[len(b'frame ') :]
                    )

                    frames.append(frame)
                else:
                    raise error.Abort(
                        _(b'unknown argument to httprequest: %s') % line
                    )

            url = path + httppath

            if frames:
                body = b''.join(bytes(f) for f in frames)

            req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)

            # urllib.Request insists on using has_data() as a proxy for
            # determining the request method. Override that to use our
            # explicitly requested method.
            req.get_method = lambda: pycompat.sysstr(method)

            try:
                res = opener.open(req)
                body = res.read()
            except util.urlerr.urlerror as e:
                # read() method must be called, but only exists in Python 2
                getattr(e, 'read', lambda: None)()
                continue

            ct = res.headers.get('Content-Type')
            if ct == 'application/mercurial-cbor':
                ui.write(
                    _(b'cbor> %s\n')
                    % stringutil.pprint(
                        cborutil.decodeall(body), bprefix=True, indent=2
                    )
                )

        elif action == b'close':
            assert peer is not None
            peer.close()
        elif action == b'readavailable':
            if not stdout or not stderr:
                raise error.Abort(
                    _(b'readavailable not available on this peer')
                )

            stdin.close()
            stdout.read()
            stderr.read()

        elif action == b'readline':
            if not stdout:
                raise error.Abort(_(b'readline not available on this peer'))
            stdout.readline()
        elif action == b'ereadline':
            if not stderr:
                raise error.Abort(_(b'ereadline not available on this peer'))
            stderr.readline()
        elif action.startswith(b'read '):
            count = int(action.split(b' ', 1)[1])
            if not stdout:
                raise error.Abort(_(b'read not available on this peer'))
            stdout.read(count)
        elif action.startswith(b'eread '):
            count = int(action.split(b' ', 1)[1])
            if not stderr:
                raise error.Abort(_(b'eread not available on this peer'))
            stderr.read(count)
        else:
            raise error.Abort(_(b'unknown action: %s') % action)

    if batchedcommands is not None:
        raise error.Abort(_(b'unclosed "batchbegin" request'))

    if peer:
        peer.close()

    if proc:
        proc.kill()