##// END OF EJS Templates
debug-revlog: move the --dump code into the `revlogutils` module...
marmoute -
r50554:7c0a3838 default
parent child Browse files
Show More
@@ -1,5091 +1,5047
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import binascii
9 import binascii
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import contextlib
12 import contextlib
13 import difflib
13 import difflib
14 import errno
14 import errno
15 import glob
15 import glob
16 import operator
16 import operator
17 import os
17 import os
18 import platform
18 import platform
19 import random
19 import random
20 import re
20 import re
21 import socket
21 import socket
22 import ssl
22 import ssl
23 import stat
23 import stat
24 import string
24 import string
25 import subprocess
25 import subprocess
26 import sys
26 import sys
27 import time
27 import time
28
28
29 from .i18n import _
29 from .i18n import _
30 from .node import (
30 from .node import (
31 bin,
31 bin,
32 hex,
32 hex,
33 nullrev,
33 nullrev,
34 short,
34 short,
35 )
35 )
36 from .pycompat import (
36 from .pycompat import (
37 getattr,
37 getattr,
38 open,
38 open,
39 )
39 )
40 from . import (
40 from . import (
41 bundle2,
41 bundle2,
42 bundlerepo,
42 bundlerepo,
43 changegroup,
43 changegroup,
44 cmdutil,
44 cmdutil,
45 color,
45 color,
46 context,
46 context,
47 copies,
47 copies,
48 dagparser,
48 dagparser,
49 dirstateutils,
49 dirstateutils,
50 encoding,
50 encoding,
51 error,
51 error,
52 exchange,
52 exchange,
53 extensions,
53 extensions,
54 filemerge,
54 filemerge,
55 filesetlang,
55 filesetlang,
56 formatter,
56 formatter,
57 hg,
57 hg,
58 httppeer,
58 httppeer,
59 localrepo,
59 localrepo,
60 lock as lockmod,
60 lock as lockmod,
61 logcmdutil,
61 logcmdutil,
62 mdiff,
62 mdiff,
63 mergestate as mergestatemod,
63 mergestate as mergestatemod,
64 metadata,
64 metadata,
65 obsolete,
65 obsolete,
66 obsutil,
66 obsutil,
67 pathutil,
67 pathutil,
68 phases,
68 phases,
69 policy,
69 policy,
70 pvec,
70 pvec,
71 pycompat,
71 pycompat,
72 registrar,
72 registrar,
73 repair,
73 repair,
74 repoview,
74 repoview,
75 requirements,
75 requirements,
76 revlog,
76 revlog,
77 revlogutils,
77 revlogutils,
78 revset,
78 revset,
79 revsetlang,
79 revsetlang,
80 scmutil,
80 scmutil,
81 setdiscovery,
81 setdiscovery,
82 simplemerge,
82 simplemerge,
83 sshpeer,
83 sshpeer,
84 sslutil,
84 sslutil,
85 streamclone,
85 streamclone,
86 strip,
86 strip,
87 tags as tagsmod,
87 tags as tagsmod,
88 templater,
88 templater,
89 treediscovery,
89 treediscovery,
90 upgrade,
90 upgrade,
91 url as urlmod,
91 url as urlmod,
92 util,
92 util,
93 vfs as vfsmod,
93 vfs as vfsmod,
94 wireprotoframing,
94 wireprotoframing,
95 wireprotoserver,
95 wireprotoserver,
96 )
96 )
97 from .interfaces import repository
97 from .interfaces import repository
98 from .utils import (
98 from .utils import (
99 cborutil,
99 cborutil,
100 compression,
100 compression,
101 dateutil,
101 dateutil,
102 procutil,
102 procutil,
103 stringutil,
103 stringutil,
104 urlutil,
104 urlutil,
105 )
105 )
106
106
107 from .revlogutils import (
107 from .revlogutils import (
108 constants as revlog_constants,
108 constants as revlog_constants,
109 debug as revlog_debug,
109 debug as revlog_debug,
110 deltas as deltautil,
110 deltas as deltautil,
111 nodemap,
111 nodemap,
112 rewrite,
112 rewrite,
113 sidedata,
113 sidedata,
114 )
114 )
115
115
116 release = lockmod.release
116 release = lockmod.release
117
117
118 table = {}
118 table = {}
119 table.update(strip.command._table)
119 table.update(strip.command._table)
120 command = registrar.command(table)
120 command = registrar.command(table)
121
121
122
122
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    # Two call forms:
    #   3 args: an explicit revlog index file plus two revs (no repo needed)
    #   2 args: two revs resolved against the current repo's changelog
    if len(args) == 3:
        index, rev1, rev2 = args
        # Open the revlog directly from the given index file; audit=False
        # because the path is user-supplied and outside any repository.
        r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = r.lookup
    elif len(args) == 2:
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    a = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
142
142
143
143
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # Fix: vfs paths are bytes throughout Mercurial; the str literal
    # 'eicar-test-file.com' would fail on Python 3 vfs join/open.
    with repo.cachevfs.open(b'eicar-test-file.com', b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join(b'eicar-test-file.com'))
159
159
160
160
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Open (possibly remote) path, detect the bundle type, and apply the
    # stream bundle's revlog data directly to the repository.
    f = hg.openpath(ui, fname)
    gen = exchange.readbundle(ui, f, fname)
    gen.apply(repo)
167
167
168
168
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
        (
            b'',
            b'from-existing',
            None,
            _(b'continue from a non-empty repository'),
        ),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
    from_existing=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0 and not from_existing:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG (first parse pass, counting only)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [b'%d' % i for i in range(0, total * linesperrev)]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    # Second parse pass: actually create the commits, under locks and a
    # single transaction so a failure leaves no partial history.
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1  # id of the last node committed (-1: none yet)
        atbranch = b'default'
        nodeids = []  # node hashes indexed by DAG id, for backrefs
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # Merge node: 3-way merge the "mf" file contents of
                        # both parents against their common ancestor.
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [
                            l.strip()
                            for l in simplemerge.render_minimized(m3)[0]
                        ]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # Tag this rev's dedicated line so each rev changes a
                    # distinct line (keeps the file mergeable).
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        # Carry over the second parent's nf* files so the
                        # merge does not delete them.
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # local tag element (":tag")
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                # branch element ("@branch"): applies to subsequent nodes
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
352
352
353
353
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    # Render a changegroup's contents; `indent` is used when nested inside a
    # bundle2 part listing.
    indent_string = b' ' * indent
    if all:
        # Verbose mode: dump every delta for changelog, manifest, and each
        # filelog section, in the order the stream delivers them.
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        # iter(callable, {}) keeps calling filelogheader() until it returns
        # an empty dict, i.e. until there are no more filelog sections.
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        # Terse mode: only changelog node hashes.
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
393
393
394
394
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # Unknown on-disk format: report the raw version and size rather
        # than aborting the whole bundle dump.
        msg = b"%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = b"%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        fm = ui.formatter(b'debugobsolete', opts)
        # Sort for deterministic output regardless of on-disk order.
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()
417
417
418
418
def _debugphaseheads(ui, data, indent=0):
    """display phase heads contained in 'data'"""
    # NOTE(review): the original docstring said "version and markers" --
    # a copy-paste from _debugobsmarkers; this function decodes and prints
    # phase-heads data.
    indent_string = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(indent_string)
            ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
427
427
428
428
def _quasirepr(thing):
    """Return a repr-like bytes rendering of ``thing``.

    Mappings are rendered with their keys sorted so the output is stable
    and independent of insertion/hash order; everything else falls back to
    the built-in repr.
    """
    if not isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        return pycompat.bytestr(repr(thing))
    rendered = [b'%s: %s' % (key, thing[key]) for key in sorted(thing)]
    return b'{%s}' % (b', '.join(rendered))
435
435
436
436
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    # Optional filter from --part-type: show only the requested part types.
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        # For known part types, recurse into a detailed dump (indented by 4)
        # unless --quiet was given.
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
459
459
460
460
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        # --spec short-circuits: print the bundlespec and stop.
        if spec:
            ui.write(b'%s\n' % exchange.getbundlespec(ui, f))
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        # bundle2 gets its own structured dump; legacy changegroups use the
        # flat dump.
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
483
483
484
484
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        caps = peer.capabilities()
        ui.writenoi18n(b'Main capabilities:\n')
        for c in sorted(caps):
            ui.write(b'  %s\n' % c)
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(b2caps.items()):
                ui.write(b'  %s\n' % key)
                for v in values:
                    ui.write(b'    %s\n' % v)
    finally:
        # Always release the peer connection, even if capability queries
        # raise.
        peer.close()
504
504
505
505
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = logcmdutil.revsingle(repo, rev, None)
    files = None

    if opts['compute']:
        # --compute: rebuild the file-change information from scratch.
        files = metadata.compute_all_files_changes(ctx)
    else:
        # Default: read the precomputed block from changelog sidedata; if
        # the revision carries no files sidedata, print nothing.
        sd = repo.changelog.sidedata(ctx.rev())
        files_block = sd.get(sidedata.SD_FILES)
        if files_block is not None:
            files = metadata.decode_files_sidedata(sd)
    if files is not None:
        for f in sorted(files.touched):
            # Classify each touched file; the specific categories take
            # precedence over the generic "touched".
            if f in files.added:
                action = b"added"
            elif f in files.removed:
                action = b"removed"
            elif f in files.merged:
                action = b"merged"
            elif f in files.salvaged:
                action = b"salvaged"
            else:
                action = b"touched"

            # Copy tracing: which parent the file was copied from, if any.
            copy_parent = b""
            copy_source = b""
            if f in files.copied_from_p1:
                copy_parent = b"p1"
                copy_source = files.copied_from_p1[f]
            elif f in files.copied_from_p2:
                copy_parent = b"p2"
                copy_source = files.copied_from_p2[f]

            data = (action, copy_parent, f, copy_source)
            template = b"%-8s %2s: %s, %s;\n"
            ui.write(template % data)
555
555
556
556
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    # Compare the dirstate against the manifests of both working-directory
    # parents; each discrepancy is warned about, then we abort if any.
    p1, p2 = repo.dirstate.parents()
    manifest1 = repo[p1].manifest()
    manifest2 = repo[p2].manifest()
    problem_count = 0
    for problem in repo.dirstate.verify(manifest1, manifest2):
        ui.warn(problem[0] % problem[1:])
        problem_count += 1
    if problem_count:
        raise error.Abort(
            _(b".hg/dirstate inconsistent with current parent's manifest")
        )
570
570
571
571
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    # --style lists configured styles; otherwise list available colors.
    lister = _debugdisplaystyle if opts.get('style') else _debugdisplaycolor
    return lister(ui)
584
584
585
585
def _debugdisplaycolor(ui):
    # Work on a copy so the caller's ui style table is not clobbered.
    ui = ui.copy()
    ui._styles.clear()
    # Make every active effect label itself, so printing the name shows
    # the effect.
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        # terminfo mode: also expose user-defined color.* / terminfo.* keys.
        for k, v in ui.configitems(b'color'):
            if k.startswith(b'color.'):
                ui._styles[k] = k[6:]
            elif k.startswith(b'terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_(b'available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(b'%s\n' % colorname, label=label)
602
602
603
603
def _debugdisplaystyle(ui):
    """List configured styles, rendering each style's effects in themselves."""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # Pad names so the effect lists line up in one column.
    column = max(len(name) for name in ui._styles)
    for name, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % name, label=name)
        if effects:
            ui.write(b': ')
            ui.write(b' ' * (max(0, column - len(name))))
            rendered = [ui.label(e, e) for e in effects.split()]
            ui.write(b', '.join(rendered))
        ui.write(b'\n')
617
617
618
618
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    #
    # Stream bundles copy revlog data wholesale, so secret changesets are
    # not filtered out; warn rather than abort (see TODO above).
    if phases.hassecret(repo):
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    # generatebundlev1 returns the set of repo requirements a consumer must
    # support, plus a chunk generator that writechunks streams to `fname`.
    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
640
640
641
641
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # Stand-alone revlog file: open it directly from cwd, no auditing.
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # b'n' events describe nodes (rev + non-null parents); b'l'
            # events attach an "rN" label to explicitly requested revisions.
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # Map each tagged revision number to its list of tag names.
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    # Emit a b'a' (annotation) event whenever the branch
                    # recorded in the changelog entry changes.
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    # dagparser turns the event stream into the compact dagtext form.
    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
    ui.write(b"\n")
711
711
712
712
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    storage_selected = any(
        opts.get(flag) for flag in (b'changelog', b'manifest', b'dir')
    )
    if storage_selected:
        # With -c/-m/--dir the single positional argument is the revision.
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        # Without a storage flag, both FILE and REV are required.
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(r.rawdata(r.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
728
728
729
729
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # -e/--extended enables the additional, more permissive date formats.
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
748
748
749
749
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``p1``: parent 1 revision number (for reference)
    :``p2``: parent 2 revision number (for reference)
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
        - base: a full snapshot
        - snap: an intermediate snapshot
        - p1: a delta against the first parent
        - p2: a delta against the second parent
        - skip1: a delta against the same base as p1
          (when p1 has empty delta
        - skip2: a delta against the same base as p2
          (when p2 has empty delta
        - prev: a delta against the previous revision
        - other: a delta against an arbitrary revision
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    withsparseread = getattr(r, '_withsparseread', False)

    # security to avoid crash on corrupted revlogs
    total_revs = len(index)

    def revinfo(rev):
        # Gather per-revision delta statistics from the revlog index entry.
        e = index[rev]
        compsize = e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
        uncompsize = e[revlog_constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]
        chainsize = 0

        base = e[revlog_constants.ENTRY_DELTA_BASE]
        p1 = e[revlog_constants.ENTRY_PARENT_1]
        p2 = e[revlog_constants.ENTRY_PARENT_2]

        # If the parents of a revision has an empty delta, we never try to delta
        # against that parent, but directly against the delta base of that
        # parent (recursively). It avoids adding a useless entry in the chain.
        #
        # However we need to detect that as a special case for delta-type, that
        # is not simply "other".
        p1_base = p1
        if p1 != nullrev and p1 < total_revs:
            e1 = index[p1]
            while e1[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e1[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p1_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p1_base = new_base
                e1 = index[p1_base]
        p2_base = p2
        if p2 != nullrev and p2 < total_revs:
            e2 = index[p2]
            while e2[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e2[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p2_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p2_base = new_base
                e2 = index[p2_base]

        # Classify the delta per the docstring's ``deltatype`` taxonomy.
        # Order matters: direct-parent matches win over skip1/skip2, which
        # win over snapshot/prev/other.
        if generaldelta:
            if base == p1:
                deltatype = b'p1'
            elif base == p2:
                deltatype = b'p2'
            elif base == rev:
                deltatype = b'base'
            elif base == p1_base:
                deltatype = b'skip1'
            elif base == p2_base:
                deltatype = b'skip2'
            elif r.issnapshot(rev):
                deltatype = b'snap'
            elif base == rev - 1:
                deltatype = b'prev'
            else:
                deltatype = b'other'
        else:
            # Without generaldelta, deltas are always against the previous
            # revision (or the revision is itself a full snapshot).
            if base == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]

        return p1, p2, compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b'    rev  p1  p2  chain# chainlen     prev   delta       '
        b'size    rawsize  chainsize     ratio   lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b'   readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    chainbases = {}
    for rev in r:
        p1, p2, comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        # Chains are numbered 1..N in order of first appearance of their base.
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # Chain of length 1: this revision has no predecessor in-chain.
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev p1 p2 chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            p1,
            p2,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            # Simulate a sparse read of the whole chain to report how much
            # disk data would actually be touched.
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
990
990
991
991
@command(
    b'debug-delta-find',
    cmdutil.debugrevlogopts
    + cmdutil.formatteropts
    + [
        (
            b'',
            b'source',
            b'full',
            _(b'input data feed to the process (full, storage, p1, p2, prev)'),
        ),
    ],
    _(b'-c|-m|FILE REV'),
    optionalrepo=True,
)
def debugdeltafind(ui, repo, arg_1, arg_2=None, source=b'full', **opts):
    """display the computation to get to a valid delta for storing REV

    This command will replay the process used to find the "best" delta to store
    a revision and display information about all the steps used to get to that
    result.

    By default, the process is fed with a the full-text for the revision. This
    can be controlled with the --source flag.

    The revision use the revision number of the target storage (not changelog
    revision number).

    note: the process is initiated from a full text of the revision to store.
    """
    opts = pycompat.byteskwargs(opts)
    # One positional argument: REV against the default storage.
    # Two positional arguments: FILE and REV.
    if arg_2 is None:
        file_ = None
        rev = arg_1
    else:
        file_ = arg_1
        rev = arg_2

    rev = int(rev)

    # NOTE(review): the name passed to openrevlog is b'debugdeltachain' —
    # presumably a copy-paste left-over from that command; it appears to be
    # used only for error reporting. Confirm before changing.
    revlog = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)

    # write_debug/debug_search make the computer narrate each step to the ui.
    deltacomputer = deltautil.deltacomputer(
        revlog,
        write_debug=ui.write,
        debug_search=not ui.quiet,
    )

    node = revlog.node(rev)
    p1r, p2r = revlog.parentrevs(rev)
    p1 = revlog.node(p1r)
    p2 = revlog.node(p2r)
    full_text = revlog.revision(rev)
    btext = [full_text]
    textlen = len(btext[0])
    cachedelta = None
    flags = revlog.flags(rev)

    if source != b'full':
        # Feed a pre-computed delta against the selected base revision
        # instead of (only) the full text.
        if source == b'storage':
            base_rev = revlog.deltaparent(rev)
        elif source == b'p1':
            base_rev = p1r
        elif source == b'p2':
            base_rev = p2r
        elif source == b'prev':
            base_rev = rev - 1
        else:
            raise error.InputError(b"invalid --source value: %s" % source)

        if base_rev != nullrev:
            base_text = revlog.revision(base_rev)
            delta = mdiff.textdiff(base_text, full_text)

            cachedelta = (base_rev, delta)
            # Drop the cached full text so the computer works from the delta.
            btext = [None]

    revinfo = revlogutils.revisioninfo(
        node,
        p1,
        p2,
        btext,
        textlen,
        cachedelta,
        flags,
    )

    fh = revlog._datafp()
    deltacomputer.finddeltainfo(revinfo, fh, target_rev=rev)
1081
1081
1082
1082
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'docket',
            False,
            _(b'display the docket (metadata file) instead'),
        ),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    if opts.get("docket"):
        # --docket: print dirstate-v2 metadata instead of the entries.
        if not repo.dirstate._use_dirstate_v2:
            raise error.Abort(_(b'dirstate v1 does not have a docket'))

        docket = repo.dirstate._map.docket
        (
            start_offset,
            root_nodes,
            nodes_with_entry,
            nodes_with_copy,
            unused_bytes,
            _unused,
            ignore_pattern,
        ) = dirstateutils.v2.TREE_METADATA.unpack(docket.tree_metadata)

        ui.write(_(b"size of dirstate data: %d\n") % docket.data_size)
        ui.write(_(b"data file uuid: %s\n") % docket.uuid)
        ui.write(_(b"start offset of root nodes: %d\n") % start_offset)
        ui.write(_(b"number of root nodes: %d\n") % root_nodes)
        ui.write(_(b"nodes with entries: %d\n") % nodes_with_entry)
        ui.write(_(b"nodes with copies: %d\n") % nodes_with_copy)
        ui.write(_(b"number of unused bytes: %d\n") % unused_bytes)
        ui.write(
            _(b"ignore pattern hash: %s\n") % binascii.hexlify(ignore_pattern)
        )
        return

    # --nodates is the deprecated spelling of --dates=no; any explicit
    # --nodates wins over --dates.
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:

        def keyfunc(entry):
            # Sort by (mtime, filename) when --datesort is given.
            filename, _state, _mode, _size, mtime = entry
            return (mtime, filename)

    else:
        keyfunc = None  # sort by filename
    entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
    entries.sort(key=keyfunc)
    for entry in entries:
        filename, state, mode, size, mtime = entry
        if mtime == -1:
            # Padding keeps the filename column aligned with real dates.
            timestr = b'unset               '
        elif nodates:
            timestr = b'set                 '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
            timestr = encoding.strtolocal(timestr)
        if mode & 0o20000:
            # Symlink bit set: show 'lnk' instead of an octal mode.
            mode = b'lnk'
        else:
            mode = b'%3o' % (mode & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1170
1170
1171
1171
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v2
    """
    # The hash only exists in the dirstate-v2 on-disk format; for v1 the
    # command prints nothing at all.
    if not repo.dirstate._use_dirstate_v2:
        return
    tree_meta = repo.dirstate._map.docket.tree_metadata
    # The ignore-pattern hash occupies the trailing 20 bytes (160 bits,
    # SHA-1) of the tree metadata blob.
    sha1_size = 20
    ui.write(binascii.hexlify(tree_meta[-sha1_size:]) + b'\n')
1186
1186
1187
1187
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
        (
            b'',
            b'local-as-revs',
            b"",
            b'treat local has having these revisions only',
        ),
        (
            b'',
            b'remote-as-revs',
            b"",
            b'use local as remote, with only these revisions',
        ),
    ]
    + cmdutil.remoteopts
    + cmdutil.formatteropts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation

    The local peer can be "replaced" by a subset of the local repository by
    using the `--local-as-revs` flag. In the same way, the usual `remote` peer
    can be "replaced" by a subset of the local repository using the
    `--remote-as-revs` flag. This is useful to efficiently debug pathological
    discovery situations.

    The following developer oriented config are relevant for people playing with this command:

    * devel.discovery.exchange-heads=True

      If False, the discovery will not start with
      remote head fetching and local head querying.

    * devel.discovery.grow-sample=True

      If False, the sample size used in set discovery will not be increased
      through the process

    * devel.discovery.grow-sample.dynamic=True

      When discovery.grow-sample.dynamic is True, the default, the sample size is
      adapted to the shape of the undecided set (it is set to the max of:
      <target-size>, len(roots(undecided)), len(heads(undecided)

    * devel.discovery.grow-sample.rate=1.05

      the rate at which the sample grow

    * devel.discovery.randomize=True

      If False, random samplings during discovery are deterministic. It is
      meant for integration tests.

    * devel.discovery.sample-size=200

      Control the initial size of the discovery sample

    * devel.discovery.sample-size.initial=100

      Control the initial size of the discovery for initial change
    """
    opts = pycompat.byteskwargs(opts)
    unfi = repo.unfiltered()

    # setup potential extra filtering
    local_revs = opts[b"local_as_revs"]
    remote_revs = opts[b"remote_as_revs"]

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    if not remote_revs:
        # normal case: talk to an actual peer resolved from the URL/path

        remoteurl, branches = urlutil.get_unique_pull_path(
            b'debugdiscovery', repo, ui, remoteurl
        )
        remote = hg.peer(repo, opts, remoteurl)
        ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
    else:
        # --remote-as-revs: fake the remote with a filtered view of the
        # local repository that hides everything outside ::remote_revs
        branches = (None, [])
        remote_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % remote_revs]
        )
        remote_filtered_revs = frozenset(remote_filtered_revs)

        def remote_func(x):
            return remote_filtered_revs

        # register a throw-away repoview filter so `filtered()` below works
        repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func

        remote = repo.peer()
        remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')

    if local_revs:
        # --local-as-revs: same trick on the local side
        local_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % local_revs]
        )
        local_filtered_revs = frozenset(local_filtered_revs)

        def local_func(x):
            return local_filtered_revs

        repoview.filtertable[b'debug-discovery-local-filter'] = local_func
        repo = repo.filtered(b'debug-discovery-local-filter')

    # `data` collects audit counters filled in by the discovery code and the
    # statistics computed below; it feeds the formatter output at the end.
    data = {}
    if opts.get(b'old'):
        # legacy tree-walking discovery (pre set-discovery protocol)

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            if remote_revs:
                r = remote._repo.filtered(b'debug-discovery-remote-filter')
                remote._repo = r
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True, audit=data
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            # reduce the raw common set to its heads
            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:
        # modern sampling-based set discovery

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = logcmdutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(
                ui,
                repo,
                remote,
                ancestorsof=nodes,
                audit=data,
                abortwhenunrelated=False,
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']

    fm = ui.formatter(b'debugdiscovery', opts)
    if fm.strict_format:
        # structured output (e.g. JSON): capture everything the discovery
        # prints and expose it as a regular "output" field instead

        @contextlib.contextmanager
        def may_capture_output():
            ui.pushbuffer()
            yield
            data[b'output'] = ui.popbuffer()

    else:
        may_capture_output = util.nullcontextmanager
    with may_capture_output():
        with util.timedcm('debug-discovery') as t:
            common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    if len(common) == 1 and repo.nullid in common:
        # nullid-only means "nothing in common"; normalize to the empty set
        common = set()
    heads_common = set(common)
    heads_remote = set(hds)
    heads_local = set(repo.heads())
    # note: they cannot be a local or remote head that is in common and not
    # itself a head of common.
    heads_common_local = heads_common & heads_local
    heads_common_remote = heads_common & heads_remote
    heads_common_both = heads_common & heads_remote & heads_local

    all = repo.revs(b'all()')
    common = repo.revs(b'::%ln', common)
    roots_common = repo.revs(b'roots(::%ld)', common)
    missing = repo.revs(b'not ::%ld', common)
    heads_missing = repo.revs(b'heads(%ld)', missing)
    roots_missing = repo.revs(b'roots(%ld)', missing)
    # sanity check: common/missing partition the whole repository
    assert len(common) + len(missing) == len(all)

    # "initial undecided": revisions neither known-common nor obviously
    # missing before sampling starts; its shape drives discovery cost
    initial_undecided = repo.revs(
        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
    )
    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
    common_initial_undecided = initial_undecided & common
    missing_initial_undecided = initial_undecided & missing

    data[b'elapsed'] = t.elapsed
    data[b'nb-common-heads'] = len(heads_common)
    data[b'nb-common-heads-local'] = len(heads_common_local)
    data[b'nb-common-heads-remote'] = len(heads_common_remote)
    data[b'nb-common-heads-both'] = len(heads_common_both)
    data[b'nb-common-roots'] = len(roots_common)
    data[b'nb-head-local'] = len(heads_local)
    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
    data[b'nb-head-remote'] = len(heads_remote)
    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
        heads_common_remote
    )
    data[b'nb-revs'] = len(all)
    data[b'nb-revs-common'] = len(common)
    data[b'nb-revs-missing'] = len(missing)
    data[b'nb-missing-heads'] = len(heads_missing)
    data[b'nb-missing-roots'] = len(roots_missing)
    data[b'nb-ini_und'] = len(initial_undecided)
    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
    data[b'nb-ini_und-common'] = len(common_initial_undecided)
    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)

    fm.startitem()
    fm.data(**pycompat.strkwargs(data))
    # display discovery summary
    fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
    fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
    # the per-kind round-trip/query counters are only present for some
    # discovery variants, hence the membership tests
    if b'total-round-trips-heads' in data:
        fm.plain(
            b" round-trips-heads: %(total-round-trips-heads)9d\n" % data
        )
    if b'total-round-trips-branches' in data:
        fm.plain(
            b" round-trips-branches: %(total-round-trips-branches)9d\n"
            % data
        )
    if b'total-round-trips-between' in data:
        fm.plain(
            b" round-trips-between: %(total-round-trips-between)9d\n" % data
        )
    fm.plain(b"queries: %(total-queries)9d\n" % data)
    if b'total-queries-branches' in data:
        fm.plain(b" queries-branches: %(total-queries-branches)9d\n" % data)
    if b'total-queries-between' in data:
        fm.plain(b" queries-between: %(total-queries-between)9d\n" % data)
    fm.plain(b"heads summary:\n")
    fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
    fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
    fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
    fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
    fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
    fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
    fm.plain(b" common: %(nb-revs-common)9d\n" % data)
    fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
    fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
    fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
    fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
    fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
    fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
    fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)

    if ui.verbose:
        fm.plain(
            b"common heads: %s\n"
            % b" ".join(sorted(short(n) for n in heads_common))
        )
    fm.end()
1469
1469
1470
1470
# transfer buffer size (4 KiB) used by `hg debugdownload` below
_chunksize = 4 << 10
1472
1472
1473
1473
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config"""
    # open the URL through Mercurial's url handling (auth, proxies, ...)
    src = urlmod.open(ui, url, output)

    if output:
        dest = open(output, b"wb", _chunksize)
    else:
        # no --output: stream straight to the ui
        dest = ui
    try:
        while True:
            chunk = src.read(_chunksize)
            if not chunk:
                break
            dest.write(chunk)
    finally:
        # only close a destination we opened ourselves; `ui` stays usable
        if output:
            dest.close()
1496
1496
1497
1497
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    loaded = extensions.extensions(ui)
    thisversion = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    for name, module in sorted(loaded, key=operator.itemgetter(0)):
        bundled = extensions.ismoduleinternal(module)

        # figure out where the extension was loaded from, when possible
        location = None
        if util.safehasattr(module, '__file__'):
            location = pycompat.fsencode(module.__file__)
        elif getattr(sys, 'oxidized', False):
            # frozen binary: everything lives inside the executable
            location = pycompat.sysexecutable
        if bundled:
            tested = []  # never expose magic string to users
        else:
            tested = getattr(module, 'testedwith', b'').split()
        buglink = getattr(module, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', name)
        else:
            fm.write(b'name', b'%s', name)
            # annotate the name with compatibility status for this hg version
            if bundled or thisversion in tested:
                fm.plain(b'\n')
            elif tested:
                fm.plain(b' (%s!)\n' % tested[-1])
            else:
                fm.plain(_(b' (untested!)\n'))

        fm.condwrite(
            ui.verbose and location,
            b'source',
            _(b' location: %s\n'),
            location or b"",
        )

        if ui.verbose:
            fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][bundled])
        fm.data(bundled=bundled)

        fm.condwrite(
            ui.verbose and tested,
            b'testedwith',
            _(b' tested with: %s\n'),
            fm.formatlist(tested, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and buglink,
            b'buglink',
            _(b' bug reporting: %s\n'),
            buglink or b"",
        )

    fm.end()
1559
1559
1560
1560
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)

    # successive transformation stages of the fileset expression tree
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    # decide which stage trees should be printed (--show-stage / -p)
    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # run the expression through each stage, printing the requested ones
    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            # the stage header is omitted in the legacy --verbose mode
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # gather the candidate file names the matcher will be tested against
    files = set()
    if opts[b'all_files']:
        # every file touched by any revision, plus subrepo entries
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        # include the working directory, unknown and ignored files included
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        # a specific revision: only files known to that changeset
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    # --show-matcher defaults to None; in that state it still triggers with
    # --verbose, while an explicit value forces/suppresses the dump
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1656
1656
1657
1657
@command(
    b"debug-repair-issue6528",
    [
        (
            b'',
            b'to-report',
            b'',
            _(b'build a report of affected revisions to this file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'from-report',
            b'',
            _(b'repair revisions listed in this report file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'paranoid',
            False,
            _(b'check that both detection methods do the same thing'),
        ),
    ]
    + cmdutil.dryrunopts,
)
def debug_repair_issue6528(ui, repo, **opts):
    """find affected revisions and repair them. See issue6528 for more details.

    The `--to-report` and `--from-report` flags allow you to cache and reuse the
    computation of affected revisions for a given repository across clones.
    The report format is line-based (with empty lines ignored):

    ```
    <ascii-hex of the affected revision>,... <unencoded filelog index filename>
    ```

    There can be multiple broken revisions per filelog, they are separated by
    a comma with no spaces. The only space is between the revision(s) and the
    filename.

    Note that this does *not* mean that this repairs future affected revisions,
    that needs a separate fix at the exchange level that was introduced in
    Mercurial 5.9.1.

    There is a `--paranoid` flag to test that the fast implementation is correct
    by checking it against the slow implementation. Since this matter is quite
    urgent and testing every edge-case is probably quite costly, we use this
    method to test on large repositories as a fuzzing method of sorts.
    """
    # Writing a report and consuming one (or doing a dry run) are mutually
    # exclusive modes of operation.
    cmdutil.check_incompatible_arguments(
        opts, 'to_report', ['from_report', 'dry_run']
    )
    # TODO maybe add filelog pattern and revision pattern parameters to help
    # narrow down the search for users that know what they're looking for?

    if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
        msg = b"can only repair revlogv1 repositories, v2 is not affected"
        raise error.Abort(_(msg))

    # the heavy lifting lives in revlogutils.rewrite
    rewrite.repair_issue6528(
        ui,
        repo,
        dry_run=opts.get('dry_run'),
        to_report=opts.get('to_report'),
        from_report=opts.get('from_report'),
        paranoid=opts.get('paranoid'),
    )
1730
1730
1731
1731
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # '%s:' padded so every value column starts at the same offset
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # bytes-like values pass through; anything else is rendered
            # as a yes/no boolean
            if util.safehasattr(value, b'startswith'):
                return value
            return b'yes' if value else b'no'

    else:
        formatvalue = pycompat.identity

    # header row
    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # label suffix describes how the repo value relates to the
        # configured value and the Mercurial default
        if repovalue != configvalue:
            state = b'mismatchconfig'
        elif repovalue != fv.default:
            state = b'mismatchdefault'
        else:
            state = b'uptodate'
        namelabel = b'formatvariant.name.' + state
        repolabel = b'formatvariant.repo.' + state

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        # config and default columns only show up with --verbose
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1802
1802
1803
1803
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def yesno(flag):
        # render a boolean capability probe as b'yes'/b'no'
        return b'yes' if flag else b'no'

    ui.writenoi18n(b'path: %s\n' % path)
    ui.writenoi18n(
        b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
    )
    ui.writenoi18n(b'exec: %s\n' % yesno(util.checkexec(path)))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(b'symlink: %s\n' % yesno(util.checklink(path)))
    ui.writenoi18n(b'hardlink: %s\n' % yesno(util.checknlink(path)))
    casesensitive = b'(unknown)'
    try:
        # probing needs to create a temporary file in `path`; failure to do
        # so (permissions, missing dir, ...) leaves the answer unknown
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1826
1826
1827
1827
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    # TODO: get desired bundlecaps from command line.
    getbundleargs = {'bundlecaps': None}
    if common:
        getbundleargs['common'] = [bin(s) for s in common]
    if head:
        getbundleargs['heads'] = [bin(s) for s in head]
    bundle = repo.getbundle(b'debug', **getbundleargs)

    # map the user-facing --type value onto the internal bundle type name
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = btypes.get(opts.get(b'type', b'bzip2').lower())
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1874
1874
1875
1875
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # no arguments: dump the combined ignore matcher itself
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
        return

    m = scmutil.match(repo[None], pats=files)
    uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
    for f in m.files():
        nf = util.normpath(f)
        ignored = None
        ignoredata = None
        if nf != b'.':
            if ignore(nf):
                ignored = nf
                ignoredata = repo.dirstate._ignorefileandline(nf)
            else:
                # not matched directly; check whether an enclosing
                # directory is ignored instead
                for p in pathutil.finddirs(nf):
                    if ignore(p):
                        ignored = p
                        ignoredata = repo.dirstate._ignorefileandline(p)
                        break
        if not ignored:
            ui.write(_(b"%s is not ignored\n") % uipathfn(f))
            continue
        if ignored == nf:
            ui.write(_(b"%s is ignored\n") % uipathfn(f))
        else:
            ui.write(
                _(
                    b"%s is ignored because of "
                    b"containing directory %s\n"
                )
                % (uipathfn(f), ignored)
            )
        ignorefile, lineno, line = ignoredata
        ui.write(
            _(b"(ignore rule in %s, line %d: '%s')\n")
            % (ignorefile, lineno, line)
        )
1924
1924
1925
1925
@command(
    b'debug-revlog-index|debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a revlog"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
    fm = ui.formatter(b'debugindex', opts)

    # some storage objects wrap a revlog in a `_revlog` attribute; fall
    # back to the object itself otherwise
    revlog = getattr(store, b'_revlog', store)

    # actual index dumping is implemented in revlogutils
    return revlog_debug.debug_index(
        ui,
        repo,
        formatter=fm,
        revlog=revlog,
        full_node=ui.debugflag,
    )
1947
1947
1948
1948
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for rev in r:
        p1, p2 = r.parents(r.node(rev))
        # one edge per parent; the second parent is omitted when null
        ui.write(b"\t%d -> %d\n" % (r.rev(p1), rev))
        if p2 != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (r.rev(p2), rev))
    ui.write(b"}\n")
1967
1967
1968
1968
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # NOTE(review): this lookup presumably forces the index to be fully
    # loaded before inspecting it — confirm
    repo.changelog.shortest(repo.nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, b'stats'):
        # only the C/Rust index implementations expose stats()
        raise error.Abort(_(b'debugindexstats only works with native code'))
    for key, value in sorted(index.stats().items()):
        ui.write(b'%s: %d\n' % (key, value))
1978
1978
1979
1979
1980 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1980 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1981 def debuginstall(ui, **opts):
1981 def debuginstall(ui, **opts):
1982 """test Mercurial installation
1982 """test Mercurial installation
1983
1983
1984 Returns 0 on success.
1984 Returns 0 on success.
1985 """
1985 """
1986 opts = pycompat.byteskwargs(opts)
1986 opts = pycompat.byteskwargs(opts)
1987
1987
1988 problems = 0
1988 problems = 0
1989
1989
1990 fm = ui.formatter(b'debuginstall', opts)
1990 fm = ui.formatter(b'debuginstall', opts)
1991 fm.startitem()
1991 fm.startitem()
1992
1992
1993 # encoding might be unknown or wrong. don't translate these messages.
1993 # encoding might be unknown or wrong. don't translate these messages.
1994 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1994 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1995 err = None
1995 err = None
1996 try:
1996 try:
1997 codecs.lookup(pycompat.sysstr(encoding.encoding))
1997 codecs.lookup(pycompat.sysstr(encoding.encoding))
1998 except LookupError as inst:
1998 except LookupError as inst:
1999 err = stringutil.forcebytestr(inst)
1999 err = stringutil.forcebytestr(inst)
2000 problems += 1
2000 problems += 1
2001 fm.condwrite(
2001 fm.condwrite(
2002 err,
2002 err,
2003 b'encodingerror',
2003 b'encodingerror',
2004 b" %s\n (check that your locale is properly set)\n",
2004 b" %s\n (check that your locale is properly set)\n",
2005 err,
2005 err,
2006 )
2006 )
2007
2007
2008 # Python
2008 # Python
2009 pythonlib = None
2009 pythonlib = None
2010 if util.safehasattr(os, '__file__'):
2010 if util.safehasattr(os, '__file__'):
2011 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
2011 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
2012 elif getattr(sys, 'oxidized', False):
2012 elif getattr(sys, 'oxidized', False):
2013 pythonlib = pycompat.sysexecutable
2013 pythonlib = pycompat.sysexecutable
2014
2014
2015 fm.write(
2015 fm.write(
2016 b'pythonexe',
2016 b'pythonexe',
2017 _(b"checking Python executable (%s)\n"),
2017 _(b"checking Python executable (%s)\n"),
2018 pycompat.sysexecutable or _(b"unknown"),
2018 pycompat.sysexecutable or _(b"unknown"),
2019 )
2019 )
2020 fm.write(
2020 fm.write(
2021 b'pythonimplementation',
2021 b'pythonimplementation',
2022 _(b"checking Python implementation (%s)\n"),
2022 _(b"checking Python implementation (%s)\n"),
2023 pycompat.sysbytes(platform.python_implementation()),
2023 pycompat.sysbytes(platform.python_implementation()),
2024 )
2024 )
2025 fm.write(
2025 fm.write(
2026 b'pythonver',
2026 b'pythonver',
2027 _(b"checking Python version (%s)\n"),
2027 _(b"checking Python version (%s)\n"),
2028 (b"%d.%d.%d" % sys.version_info[:3]),
2028 (b"%d.%d.%d" % sys.version_info[:3]),
2029 )
2029 )
2030 fm.write(
2030 fm.write(
2031 b'pythonlib',
2031 b'pythonlib',
2032 _(b"checking Python lib (%s)...\n"),
2032 _(b"checking Python lib (%s)...\n"),
2033 pythonlib or _(b"unknown"),
2033 pythonlib or _(b"unknown"),
2034 )
2034 )
2035
2035
2036 try:
2036 try:
2037 from . import rustext # pytype: disable=import-error
2037 from . import rustext # pytype: disable=import-error
2038
2038
2039 rustext.__doc__ # trigger lazy import
2039 rustext.__doc__ # trigger lazy import
2040 except ImportError:
2040 except ImportError:
2041 rustext = None
2041 rustext = None
2042
2042
2043 security = set(sslutil.supportedprotocols)
2043 security = set(sslutil.supportedprotocols)
2044 if sslutil.hassni:
2044 if sslutil.hassni:
2045 security.add(b'sni')
2045 security.add(b'sni')
2046
2046
2047 fm.write(
2047 fm.write(
2048 b'pythonsecurity',
2048 b'pythonsecurity',
2049 _(b"checking Python security support (%s)\n"),
2049 _(b"checking Python security support (%s)\n"),
2050 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
2050 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
2051 )
2051 )
2052
2052
2053 # These are warnings, not errors. So don't increment problem count. This
2053 # These are warnings, not errors. So don't increment problem count. This
2054 # may change in the future.
2054 # may change in the future.
2055 if b'tls1.2' not in security:
2055 if b'tls1.2' not in security:
2056 fm.plain(
2056 fm.plain(
2057 _(
2057 _(
2058 b' TLS 1.2 not supported by Python install; '
2058 b' TLS 1.2 not supported by Python install; '
2059 b'network connections lack modern security\n'
2059 b'network connections lack modern security\n'
2060 )
2060 )
2061 )
2061 )
2062 if b'sni' not in security:
2062 if b'sni' not in security:
2063 fm.plain(
2063 fm.plain(
2064 _(
2064 _(
2065 b' SNI not supported by Python install; may have '
2065 b' SNI not supported by Python install; may have '
2066 b'connectivity issues with some servers\n'
2066 b'connectivity issues with some servers\n'
2067 )
2067 )
2068 )
2068 )
2069
2069
2070 fm.plain(
2070 fm.plain(
2071 _(
2071 _(
2072 b"checking Rust extensions (%s)\n"
2072 b"checking Rust extensions (%s)\n"
2073 % (b'missing' if rustext is None else b'installed')
2073 % (b'missing' if rustext is None else b'installed')
2074 ),
2074 ),
2075 )
2075 )
2076
2076
2077 # TODO print CA cert info
2077 # TODO print CA cert info
2078
2078
2079 # hg version
2079 # hg version
2080 hgver = util.version()
2080 hgver = util.version()
2081 fm.write(
2081 fm.write(
2082 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
2082 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
2083 )
2083 )
2084 fm.write(
2084 fm.write(
2085 b'hgverextra',
2085 b'hgverextra',
2086 _(b"checking Mercurial custom build (%s)\n"),
2086 _(b"checking Mercurial custom build (%s)\n"),
2087 b'+'.join(hgver.split(b'+')[1:]),
2087 b'+'.join(hgver.split(b'+')[1:]),
2088 )
2088 )
2089
2089
2090 # compiled modules
2090 # compiled modules
2091 hgmodules = None
2091 hgmodules = None
2092 if util.safehasattr(sys.modules[__name__], '__file__'):
2092 if util.safehasattr(sys.modules[__name__], '__file__'):
2093 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
2093 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
2094 elif getattr(sys, 'oxidized', False):
2094 elif getattr(sys, 'oxidized', False):
2095 hgmodules = pycompat.sysexecutable
2095 hgmodules = pycompat.sysexecutable
2096
2096
2097 fm.write(
2097 fm.write(
2098 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
2098 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
2099 )
2099 )
2100 fm.write(
2100 fm.write(
2101 b'hgmodules',
2101 b'hgmodules',
2102 _(b"checking installed modules (%s)...\n"),
2102 _(b"checking installed modules (%s)...\n"),
2103 hgmodules or _(b"unknown"),
2103 hgmodules or _(b"unknown"),
2104 )
2104 )
2105
2105
2106 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
2106 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
2107 rustext = rustandc # for now, that's the only case
2107 rustext = rustandc # for now, that's the only case
2108 cext = policy.policy in (b'c', b'allow') or rustandc
2108 cext = policy.policy in (b'c', b'allow') or rustandc
2109 nopure = cext or rustext
2109 nopure = cext or rustext
2110 if nopure:
2110 if nopure:
2111 err = None
2111 err = None
2112 try:
2112 try:
2113 if cext:
2113 if cext:
2114 from .cext import ( # pytype: disable=import-error
2114 from .cext import ( # pytype: disable=import-error
2115 base85,
2115 base85,
2116 bdiff,
2116 bdiff,
2117 mpatch,
2117 mpatch,
2118 osutil,
2118 osutil,
2119 )
2119 )
2120
2120
2121 # quiet pyflakes
2121 # quiet pyflakes
2122 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
2122 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
2123 if rustext:
2123 if rustext:
2124 from .rustext import ( # pytype: disable=import-error
2124 from .rustext import ( # pytype: disable=import-error
2125 ancestor,
2125 ancestor,
2126 dirstate,
2126 dirstate,
2127 )
2127 )
2128
2128
2129 dir(ancestor), dir(dirstate) # quiet pyflakes
2129 dir(ancestor), dir(dirstate) # quiet pyflakes
2130 except Exception as inst:
2130 except Exception as inst:
2131 err = stringutil.forcebytestr(inst)
2131 err = stringutil.forcebytestr(inst)
2132 problems += 1
2132 problems += 1
2133 fm.condwrite(err, b'extensionserror', b" %s\n", err)
2133 fm.condwrite(err, b'extensionserror', b" %s\n", err)
2134
2134
2135 compengines = util.compengines._engines.values()
2135 compengines = util.compengines._engines.values()
2136 fm.write(
2136 fm.write(
2137 b'compengines',
2137 b'compengines',
2138 _(b'checking registered compression engines (%s)\n'),
2138 _(b'checking registered compression engines (%s)\n'),
2139 fm.formatlist(
2139 fm.formatlist(
2140 sorted(e.name() for e in compengines),
2140 sorted(e.name() for e in compengines),
2141 name=b'compengine',
2141 name=b'compengine',
2142 fmt=b'%s',
2142 fmt=b'%s',
2143 sep=b', ',
2143 sep=b', ',
2144 ),
2144 ),
2145 )
2145 )
2146 fm.write(
2146 fm.write(
2147 b'compenginesavail',
2147 b'compenginesavail',
2148 _(b'checking available compression engines (%s)\n'),
2148 _(b'checking available compression engines (%s)\n'),
2149 fm.formatlist(
2149 fm.formatlist(
2150 sorted(e.name() for e in compengines if e.available()),
2150 sorted(e.name() for e in compengines if e.available()),
2151 name=b'compengine',
2151 name=b'compengine',
2152 fmt=b'%s',
2152 fmt=b'%s',
2153 sep=b', ',
2153 sep=b', ',
2154 ),
2154 ),
2155 )
2155 )
2156 wirecompengines = compression.compengines.supportedwireengines(
2156 wirecompengines = compression.compengines.supportedwireengines(
2157 compression.SERVERROLE
2157 compression.SERVERROLE
2158 )
2158 )
2159 fm.write(
2159 fm.write(
2160 b'compenginesserver',
2160 b'compenginesserver',
2161 _(
2161 _(
2162 b'checking available compression engines '
2162 b'checking available compression engines '
2163 b'for wire protocol (%s)\n'
2163 b'for wire protocol (%s)\n'
2164 ),
2164 ),
2165 fm.formatlist(
2165 fm.formatlist(
2166 [e.name() for e in wirecompengines if e.wireprotosupport()],
2166 [e.name() for e in wirecompengines if e.wireprotosupport()],
2167 name=b'compengine',
2167 name=b'compengine',
2168 fmt=b'%s',
2168 fmt=b'%s',
2169 sep=b', ',
2169 sep=b', ',
2170 ),
2170 ),
2171 )
2171 )
2172 re2 = b'missing'
2172 re2 = b'missing'
2173 if util._re2:
2173 if util._re2:
2174 re2 = b'available'
2174 re2 = b'available'
2175 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
2175 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
2176 fm.data(re2=bool(util._re2))
2176 fm.data(re2=bool(util._re2))
2177
2177
2178 # templates
2178 # templates
2179 p = templater.templatedir()
2179 p = templater.templatedir()
2180 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
2180 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
2181 fm.condwrite(not p, b'', _(b" no template directories found\n"))
2181 fm.condwrite(not p, b'', _(b" no template directories found\n"))
2182 if p:
2182 if p:
2183 (m, fp) = templater.try_open_template(b"map-cmdline.default")
2183 (m, fp) = templater.try_open_template(b"map-cmdline.default")
2184 if m:
2184 if m:
2185 # template found, check if it is working
2185 # template found, check if it is working
2186 err = None
2186 err = None
2187 try:
2187 try:
2188 templater.templater.frommapfile(m)
2188 templater.templater.frommapfile(m)
2189 except Exception as inst:
2189 except Exception as inst:
2190 err = stringutil.forcebytestr(inst)
2190 err = stringutil.forcebytestr(inst)
2191 p = None
2191 p = None
2192 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
2192 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
2193 else:
2193 else:
2194 p = None
2194 p = None
2195 fm.condwrite(
2195 fm.condwrite(
2196 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
2196 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
2197 )
2197 )
2198 fm.condwrite(
2198 fm.condwrite(
2199 not m,
2199 not m,
2200 b'defaulttemplatenotfound',
2200 b'defaulttemplatenotfound',
2201 _(b" template '%s' not found\n"),
2201 _(b" template '%s' not found\n"),
2202 b"default",
2202 b"default",
2203 )
2203 )
2204 if not p:
2204 if not p:
2205 problems += 1
2205 problems += 1
2206 fm.condwrite(
2206 fm.condwrite(
2207 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
2207 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
2208 )
2208 )
2209
2209
2210 # editor
2210 # editor
2211 editor = ui.geteditor()
2211 editor = ui.geteditor()
2212 editor = util.expandpath(editor)
2212 editor = util.expandpath(editor)
2213 editorbin = procutil.shellsplit(editor)[0]
2213 editorbin = procutil.shellsplit(editor)[0]
2214 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
2214 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
2215 cmdpath = procutil.findexe(editorbin)
2215 cmdpath = procutil.findexe(editorbin)
2216 fm.condwrite(
2216 fm.condwrite(
2217 not cmdpath and editor == b'vi',
2217 not cmdpath and editor == b'vi',
2218 b'vinotfound',
2218 b'vinotfound',
2219 _(
2219 _(
2220 b" No commit editor set and can't find %s in PATH\n"
2220 b" No commit editor set and can't find %s in PATH\n"
2221 b" (specify a commit editor in your configuration"
2221 b" (specify a commit editor in your configuration"
2222 b" file)\n"
2222 b" file)\n"
2223 ),
2223 ),
2224 not cmdpath and editor == b'vi' and editorbin,
2224 not cmdpath and editor == b'vi' and editorbin,
2225 )
2225 )
2226 fm.condwrite(
2226 fm.condwrite(
2227 not cmdpath and editor != b'vi',
2227 not cmdpath and editor != b'vi',
2228 b'editornotfound',
2228 b'editornotfound',
2229 _(
2229 _(
2230 b" Can't find editor '%s' in PATH\n"
2230 b" Can't find editor '%s' in PATH\n"
2231 b" (specify a commit editor in your configuration"
2231 b" (specify a commit editor in your configuration"
2232 b" file)\n"
2232 b" file)\n"
2233 ),
2233 ),
2234 not cmdpath and editorbin,
2234 not cmdpath and editorbin,
2235 )
2235 )
2236 if not cmdpath and editor != b'vi':
2236 if not cmdpath and editor != b'vi':
2237 problems += 1
2237 problems += 1
2238
2238
2239 # check username
2239 # check username
2240 username = None
2240 username = None
2241 err = None
2241 err = None
2242 try:
2242 try:
2243 username = ui.username()
2243 username = ui.username()
2244 except error.Abort as e:
2244 except error.Abort as e:
2245 err = e.message
2245 err = e.message
2246 problems += 1
2246 problems += 1
2247
2247
2248 fm.condwrite(
2248 fm.condwrite(
2249 username, b'username', _(b"checking username (%s)\n"), username
2249 username, b'username', _(b"checking username (%s)\n"), username
2250 )
2250 )
2251 fm.condwrite(
2251 fm.condwrite(
2252 err,
2252 err,
2253 b'usernameerror',
2253 b'usernameerror',
2254 _(
2254 _(
2255 b"checking username...\n %s\n"
2255 b"checking username...\n %s\n"
2256 b" (specify a username in your configuration file)\n"
2256 b" (specify a username in your configuration file)\n"
2257 ),
2257 ),
2258 err,
2258 err,
2259 )
2259 )
2260
2260
2261 for name, mod in extensions.extensions():
2261 for name, mod in extensions.extensions():
2262 handler = getattr(mod, 'debuginstall', None)
2262 handler = getattr(mod, 'debuginstall', None)
2263 if handler is not None:
2263 if handler is not None:
2264 problems += handler(ui, fm)
2264 problems += handler(ui, fm)
2265
2265
2266 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
2266 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
2267 if not problems:
2267 if not problems:
2268 fm.data(problems=problems)
2268 fm.data(problems=problems)
2269 fm.condwrite(
2269 fm.condwrite(
2270 problems,
2270 problems,
2271 b'problems',
2271 b'problems',
2272 _(b"%d problems detected, please check your install!\n"),
2272 _(b"%d problems detected, please check your install!\n"),
2273 problems,
2273 problems,
2274 )
2274 )
2275 fm.end()
2275 fm.end()
2276
2276
2277 return problems
2277 return problems
2278
2278
2279
2279
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    # Contact the target as a peer so this also works for repositories we
    # do not have locally (the wire protocol 'known' command).
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    flags = peer.known([bin(s) for s in ids])
    # One '0'/'1' character per queried node, in input order.
    ui.write(b"%s\n" % b"".join(b"1" if f else b"0" for f in flags))
2293
2293
2294
2294
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # Kept only as an alias for old shell-completion scripts; the real
    # implementation lives in debugnamecomplete.
    debugnamecomplete(ui, repo, *args)
2299
2299
2300
2300
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # Force-free mode: blindly delete the lock files and exit.  This is
    # dangerous because another process may legitimately hold the lock.
    if opts.get('force_free_lock'):
        repo.svfs.tryunlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.tryunlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    locks = []
    try:
        # Set mode: acquire the requested lock(s) non-blockingly and hold
        # them until the user (or a signal) tells us to let go.
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if locks:
            try:
                if ui.interactive():
                    prompt = _(b"ready to release the lock (y)? $$ &Yes")
                    ui.promptchoice(prompt)
                else:
                    msg = b"%d locks held, waiting for signal\n"
                    msg %= len(locks)
                    ui.status(msg)
                    while True:  # XXX wait for a signal
                        time.sleep(0.1)
            except KeyboardInterrupt:
                msg = b"signal-received releasing locks\n"
                ui.status(msg)
            return 0
    finally:
        release(*locks)

    # Report mode (no options): show who holds each lock and for how long.
    now = time.time()
    held = 0

    def report(vfs, name, method):
        """Print the state of one lock file; return 1 if held, 0 if free."""
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # We managed to take the lock ourselves, so it was free.
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    # lock contents are "host:pid"
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except FileNotFoundError:
                # lock file vanished between our probe and the stat: free
                pass

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2423
2423
2424
2424
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # Only the revlog-based manifest storage exposes a fulltext cache.
        storage = repo.manifestlog.getstorage(b'')
        try:
            return storage._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        with repo.wlock():
            getcache().clear(clear_persisted_data=True)
        return

    if add:
        with repo.wlock():
            manifestlog = repo.manifestlog
            store = manifestlog.getstorage(b'')
            for n in add:
                try:
                    manifest = manifestlog[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read()  # stores revision in cache too
        return

    # Default mode: dump the cache contents, most recently used first.
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
        return

    ui.write(
        _(
            b'cache contains %d manifest entries, in order of most to '
            b'least recent:\n'
        )
        % (len(cache),)
    )
    totalsize = 0
    for nodeid in cache:
        # Use cache.peek to not update the LRU order.
        entry = cache.peek(nodeid)
        entrysize = len(entry)
        totalsize += entrysize + 24  # 20 bytes nodeid, 4 bytes size
        ui.write(
            _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(entrysize))
        )
    ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
    ui.write(
        _(b'total cache data size %s, on-disk %s\n')
        % (util.bytecount(totalsize), util.bytecount(ondisk))
    )
2498
2498
2499
2499
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about which merge state file
    format (v1 or v2) was chosen."""

    if ui.verbose:
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # Default human-readable template, rendering the same nested
        # structure (commits / files / extras) that the formatter emits.
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # The two commits being merged (local / other), with optional labels.
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # Per-file merge records.  The layout of the 'state' tuple depends on
    # the record type (content merge vs. path conflict).
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    # Extras for files that have extras recorded but no merge record.
    fm_extras = fm.nested(b'extras')
    for f, d in sorted(ms.allextras().items()):
        if f in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for k, v in d.items():
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2607
2607
2608
2608
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    candidates = set()
    # Branch names need special handling: historically only *open* branches
    # were listed, so they are excluded from the namespace walk and added
    # separately from the branchmap below.
    for namespace, ns in repo.names.items():
        if namespace != b'branches':
            candidates.update(ns.listnames(repo))
    candidates.update(
        branch
        for (branch, heads, tip, closed) in repo.branchmap().iterbranches()
        if not closed
    )
    # With no arguments, complete against the empty prefix (i.e. everything).
    prefixes = args or [b'']
    matches = set()
    for prefix in prefixes:
        matches.update(n for n in candidates if n.startswith(prefix))
    ui.write(b'\n'.join(sorted(matches)))
    ui.write(b'\n')
2631
2631
2632
2632
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    if opts['dump_new']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        # Prefer the index's native serializer when available (e.g. the
        # Rust index); otherwise fall back to the pure-Python one.
        if util.safehasattr(cl.index, "nodemap_data_all"):
            serialized = cl.index.nodemap_data_all()
        else:
            serialized = nodemap.persistent_data(cl.index)
        ui.write(serialized)
    elif opts['dump_disk']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        on_disk = nodemap.persisted_data(cl)
        if on_disk is not None:
            docket, data = on_disk
            ui.write(data[:])
    elif opts['check']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        on_disk = nodemap.persisted_data(cl)
        if on_disk is not None:
            docket, data = on_disk
            return nodemap.check_data(ui, cl.index, data)
    elif opts['metadata']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        on_disk = nodemap.persisted_data(cl)
        if on_disk is not None:
            docket, data = on_disk
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2694
2694
2695
2695
2696 @command(
2696 @command(
2697 b'debugobsolete',
2697 b'debugobsolete',
2698 [
2698 [
2699 (b'', b'flags', 0, _(b'markers flag')),
2699 (b'', b'flags', 0, _(b'markers flag')),
2700 (
2700 (
2701 b'',
2701 b'',
2702 b'record-parents',
2702 b'record-parents',
2703 False,
2703 False,
2704 _(b'record parent information for the precursor'),
2704 _(b'record parent information for the precursor'),
2705 ),
2705 ),
2706 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2706 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2707 (
2707 (
2708 b'',
2708 b'',
2709 b'exclusive',
2709 b'exclusive',
2710 False,
2710 False,
2711 _(b'restrict display to markers only relevant to REV'),
2711 _(b'restrict display to markers only relevant to REV'),
2712 ),
2712 ),
2713 (b'', b'index', False, _(b'display index of the marker')),
2713 (b'', b'index', False, _(b'display index of the marker')),
2714 (b'', b'delete', [], _(b'delete markers specified by indices')),
2714 (b'', b'delete', [], _(b'delete markers specified by indices')),
2715 ]
2715 ]
2716 + cmdutil.commitopts2
2716 + cmdutil.commitopts2
2717 + cmdutil.formatteropts,
2717 + cmdutil.formatteropts,
2718 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2718 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2719 )
2719 )
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # Turn a full-hex node string into binary form, validating its
        # length against this repo's node size.
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != repo.nodeconstants.nodelen:
                raise ValueError
            return n
        except ValueError:
            # bin() raises ValueError on non-hex input; the length check
            # above funnels short/long ids into the same error path.
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    # Mode 1: --delete INDEX... removes existing markers by position.
    if opts.get(b'delete'):
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    # Mode 2: a precursor argument means "create a new marker".
    if precursor is not None:
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    # Recording parents requires the precursor to be known
                    # locally (parents are read from the unfiltered repo).
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot used --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                # obsstore.create() rejects malformed markers with
                # ValueError; surface it as a user-facing abort.
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    # Mode 3: no precursor -- list markers (optionally limited by --rev).
    else:
        if opts[b'rev']:
            revs = logcmdutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            # --index positions are global, so iterate over all markers
            # even though only the --rev subset will be displayed.
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2845
2845
2846
2846
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    # Resolve the requested revision (working directory when no --rev).
    byteopts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, byteopts.get(b'rev'), default=None)
    # p1copies() maps destination -> source; print as "source -> dest".
    copymap = ctx.p1copies()
    for destination, source in copymap.items():
        ui.write(b'%s -> %s\n' % (source, destination))
2859
2859
2860
2860
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""

    # Resolve the requested revision (working directory when no --rev).
    byteopts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, byteopts.get(b'rev'), default=None)
    # p2copies() maps destination -> source; print as "source -> dest".
    copymap = ctx.p2copies()
    for destination, source in copymap.items():
        ui.write(b'%s -> %s\n' % (source, destination))
2873
2873
2874
2874
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # Return (files, dirs) completions for `path`, keeping only
        # dirstate entries whose state letter appears in `acceptable`.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            # The spec points outside the repository: nothing to complete.
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        # Make the spec repo-relative and slash-separated, matching how
        # the dirstate stores paths.
        spec = spec[len(rootdir) :]
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.items():
            if f.startswith(spec) and st.state in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # Without --full, stop at the next path separator: a match
                # containing one completes only as a directory prefix.
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # Build the accepted dirstate state letters from the flags; an empty
    # string falls back to all of them (b'nmar') below.
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    # Files and directory prefixes are printed together, one per line,
    # relative to the current working directory.
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2943
2943
2944
2944
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    # Resolve both endpoints; patterns are matched against the first one.
    source_ctx = scmutil.revsingle(repo, rev1)
    dest_ctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(source_ctx, pats, opts)
    copy_map = copies.pathcopies(source_ctx, dest_ctx, matcher)
    # One "source -> destination" line, sorted by destination path.
    for destination, source in sorted(copy_map.items()):
        ui.write(b'%s -> %s\n' % (source, destination))
2958
2958
2959
2959
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    logging_overrides = {
        (b'devel', b'debug.peer-request'): True,
    }

    with ui.configoverride(logging_overrides):
        peer = hg.peer(ui, {}, path)

    try:
        is_local = peer.local() is not None
        pushable = peer.canpush()

        yes, no = _(b'yes'), _(b'no')
        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % (yes if is_local else no))
        ui.write(_(b'pushable: %s\n') % (yes if pushable else no))
    finally:
        # Tear the connection down even if one of the queries fails.
        peer.close()
2983
2983
2984
2984
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts[b'tool']:
        # --tool is applied by forcing ui.forcemerge, which wins over
        # every other configuration source in _picktool.
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            # Silence _picktool's own chatter unless --debug is active, so
            # normal output stays one "FILE = MERGETOOL" line per file.
            with ui.silent(
                error=True
            ) if not ui.debugflag else util.nullcontextmanager():
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (path, tool))
3069
3069
3070
3070
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    target = hg.peer(ui, {}, repopath)
    try:
        if not keyinfo:
            # Listing mode: dump every key/value pair in the namespace.
            for key, value in sorted(target.listkeys(namespace).items()):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(key), stringutil.escapestr(value))
                )
        else:
            # Update mode: attempt the old -> new transition for KEY.
            key, old, new = keyinfo
            with target.commandexecutor() as executor:
                args = {
                    b'namespace': namespace,
                    b'key': key,
                    b'old': old,
                    b'new': new,
                }
                outcome = executor.callcommand(b'pushkey', args).result()

            ui.status(pycompat.bytestr(outcome) + b'\n')
            return not outcome
    finally:
        target.close()
3106
3106
3107
3107
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    # Compare the pvecs (position vectors) of two revisions and print how
    # they relate: b"=" equal, b">"/b"<" ancestor order, b"|" unrelated.
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    # NOTE(review): if none of the four comparisons above holds, `rel`
    # stays unbound and the final ui.write raises UnboundLocalError --
    # presumably pvec guarantees one relation always applies; confirm.
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
3134
3134
3135
3135
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            # files in the target manifest but unknown to the dirstate
            manifestonly = manifestfiles - dirstatefiles
            dsonly = dirstatefiles - manifestfiles
            # dirstate-only entries that are not freshly added are the
            # inconsistent ones worth rebuilding
            dsnotadded = {f for f in dsonly if not dirstate.get_entry(f).added}
            changedfiles = manifestonly | dsnotadded

        # changedfiles=None means "rebuild every entry from the manifest".
        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3183
3183
3184
3184
@command(
    b'debugrebuildfncache',
    [
        (
            b'',
            b'only-data',
            False,
            _(b'only look for wrong .d files (much faster)'),
        )
    ],
    b'',
)
def debugrebuildfncache(ui, repo, **opts):
    """rebuild the fncache file"""
    # --only-data restricts the scan to missing/extra .d files.
    only_data = pycompat.byteskwargs(opts).get(b"only_data")
    repair.rebuildfncache(ui, repo, only_data)
3201
3201
3202
3202
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    byteopts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, byteopts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, byteopts)
    for path in ctx.walk(matcher):
        fctx = ctx[path]
        # renamed() yields (source path, source filenode) or a false value.
        renamed = fctx.filelog().renamed(fctx.filenode())
        relpath = repo.pathto(path)
        if not renamed:
            ui.write(_(b"%s not renamed\n") % relpath)
            continue
        ui.write(
            _(b"%s renamed from %s:%s\n")
            % (relpath, renamed[0], hex(renamed[1]))
        )
3222
3222
3223
3223
3224 @command(b'debugrequires|debugrequirements', [], b'')
3224 @command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    # One requirement per line, in sorted order for stable output.
    for requirement in sorted(repo.requirements):
        ui.write(requirement + b"\n")
3229
3229
3230
3230
3231 @command(
3231 @command(
3232 b'debugrevlog',
3232 b'debugrevlog',
3233 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
3233 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
3234 _(b'-c|-m|FILE'),
3234 _(b'-c|-m|FILE'),
3235 optionalrepo=True,
3235 optionalrepo=True,
3236 )
3236 )
3237 def debugrevlog(ui, repo, file_=None, **opts):
3237 def debugrevlog(ui, repo, file_=None, **opts):
3238 """show data and statistics about a revlog"""
3238 """show data and statistics about a revlog"""
3239 opts = pycompat.byteskwargs(opts)
3239 opts = pycompat.byteskwargs(opts)
3240 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
3240 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
3241
3241
3242 if opts.get(b"dump"):
3242 if opts.get(b"dump"):
3243 numrevs = len(r)
3243 revlog_debug.dump(ui, r)
3244 ui.write(
3245 (
3246 b"# rev p1rev p2rev start end deltastart base p1 p2"
3247 b" rawsize totalsize compression heads chainlen\n"
3248 )
3249 )
3250 ts = 0
3251 heads = set()
3252
3253 for rev in range(numrevs):
3254 dbase = r.deltaparent(rev)
3255 if dbase == -1:
3256 dbase = rev
3257 cbase = r.chainbase(rev)
3258 clen = r.chainlen(rev)
3259 p1, p2 = r.parentrevs(rev)
3260 rs = r.rawsize(rev)
3261 ts = ts + rs
3262 heads -= set(r.parentrevs(rev))
3263 heads.add(rev)
3264 try:
3265 compression = ts / r.end(rev)
3266 except ZeroDivisionError:
3267 compression = 0
3268 ui.write(
3269 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
3270 b"%11d %5d %8d\n"
3271 % (
3272 rev,
3273 p1,
3274 p2,
3275 r.start(rev),
3276 r.end(rev),
3277 r.start(dbase),
3278 r.start(cbase),
3279 r.start(p1),
3280 r.start(p2),
3281 rs,
3282 ts,
3283 compression,
3284 len(heads),
3285 clen,
3286 )
3287 )
3288 return 0
3244 return 0
3289
3245
3290 format = r._format_version
3246 format = r._format_version
3291 v = r._format_flags
3247 v = r._format_flags
3292 flags = []
3248 flags = []
3293 gdelta = False
3249 gdelta = False
3294 if v & revlog.FLAG_INLINE_DATA:
3250 if v & revlog.FLAG_INLINE_DATA:
3295 flags.append(b'inline')
3251 flags.append(b'inline')
3296 if v & revlog.FLAG_GENERALDELTA:
3252 if v & revlog.FLAG_GENERALDELTA:
3297 gdelta = True
3253 gdelta = True
3298 flags.append(b'generaldelta')
3254 flags.append(b'generaldelta')
3299 if not flags:
3255 if not flags:
3300 flags = [b'(none)']
3256 flags = [b'(none)']
3301
3257
3302 ### tracks merge vs single parent
3258 ### tracks merge vs single parent
3303 nummerges = 0
3259 nummerges = 0
3304
3260
3305 ### tracks ways the "delta" are build
3261 ### tracks ways the "delta" are build
3306 # nodelta
3262 # nodelta
3307 numempty = 0
3263 numempty = 0
3308 numemptytext = 0
3264 numemptytext = 0
3309 numemptydelta = 0
3265 numemptydelta = 0
3310 # full file content
3266 # full file content
3311 numfull = 0
3267 numfull = 0
3312 # intermediate snapshot against a prior snapshot
3268 # intermediate snapshot against a prior snapshot
3313 numsemi = 0
3269 numsemi = 0
3314 # snapshot count per depth
3270 # snapshot count per depth
3315 numsnapdepth = collections.defaultdict(lambda: 0)
3271 numsnapdepth = collections.defaultdict(lambda: 0)
3316 # delta against previous revision
3272 # delta against previous revision
3317 numprev = 0
3273 numprev = 0
3318 # delta against first or second parent (not prev)
3274 # delta against first or second parent (not prev)
3319 nump1 = 0
3275 nump1 = 0
3320 nump2 = 0
3276 nump2 = 0
3321 # delta against neither prev nor parents
3277 # delta against neither prev nor parents
3322 numother = 0
3278 numother = 0
3323 # delta against prev that are also first or second parent
3279 # delta against prev that are also first or second parent
3324 # (details of `numprev`)
3280 # (details of `numprev`)
3325 nump1prev = 0
3281 nump1prev = 0
3326 nump2prev = 0
3282 nump2prev = 0
3327
3283
3328 # data about delta chain of each revs
3284 # data about delta chain of each revs
3329 chainlengths = []
3285 chainlengths = []
3330 chainbases = []
3286 chainbases = []
3331 chainspans = []
3287 chainspans = []
3332
3288
3333 # data about each revision
3289 # data about each revision
3334 datasize = [None, 0, 0]
3290 datasize = [None, 0, 0]
3335 fullsize = [None, 0, 0]
3291 fullsize = [None, 0, 0]
3336 semisize = [None, 0, 0]
3292 semisize = [None, 0, 0]
3337 # snapshot count per depth
3293 # snapshot count per depth
3338 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
3294 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
3339 deltasize = [None, 0, 0]
3295 deltasize = [None, 0, 0]
3340 chunktypecounts = {}
3296 chunktypecounts = {}
3341 chunktypesizes = {}
3297 chunktypesizes = {}
3342
3298
3343 def addsize(size, l):
3299 def addsize(size, l):
3344 if l[0] is None or size < l[0]:
3300 if l[0] is None or size < l[0]:
3345 l[0] = size
3301 l[0] = size
3346 if size > l[1]:
3302 if size > l[1]:
3347 l[1] = size
3303 l[1] = size
3348 l[2] += size
3304 l[2] += size
3349
3305
3350 numrevs = len(r)
3306 numrevs = len(r)
3351 for rev in range(numrevs):
3307 for rev in range(numrevs):
3352 p1, p2 = r.parentrevs(rev)
3308 p1, p2 = r.parentrevs(rev)
3353 delta = r.deltaparent(rev)
3309 delta = r.deltaparent(rev)
3354 if format > 0:
3310 if format > 0:
3355 addsize(r.rawsize(rev), datasize)
3311 addsize(r.rawsize(rev), datasize)
3356 if p2 != nullrev:
3312 if p2 != nullrev:
3357 nummerges += 1
3313 nummerges += 1
3358 size = r.length(rev)
3314 size = r.length(rev)
3359 if delta == nullrev:
3315 if delta == nullrev:
3360 chainlengths.append(0)
3316 chainlengths.append(0)
3361 chainbases.append(r.start(rev))
3317 chainbases.append(r.start(rev))
3362 chainspans.append(size)
3318 chainspans.append(size)
3363 if size == 0:
3319 if size == 0:
3364 numempty += 1
3320 numempty += 1
3365 numemptytext += 1
3321 numemptytext += 1
3366 else:
3322 else:
3367 numfull += 1
3323 numfull += 1
3368 numsnapdepth[0] += 1
3324 numsnapdepth[0] += 1
3369 addsize(size, fullsize)
3325 addsize(size, fullsize)
3370 addsize(size, snapsizedepth[0])
3326 addsize(size, snapsizedepth[0])
3371 else:
3327 else:
3372 chainlengths.append(chainlengths[delta] + 1)
3328 chainlengths.append(chainlengths[delta] + 1)
3373 baseaddr = chainbases[delta]
3329 baseaddr = chainbases[delta]
3374 revaddr = r.start(rev)
3330 revaddr = r.start(rev)
3375 chainbases.append(baseaddr)
3331 chainbases.append(baseaddr)
3376 chainspans.append((revaddr - baseaddr) + size)
3332 chainspans.append((revaddr - baseaddr) + size)
3377 if size == 0:
3333 if size == 0:
3378 numempty += 1
3334 numempty += 1
3379 numemptydelta += 1
3335 numemptydelta += 1
3380 elif r.issnapshot(rev):
3336 elif r.issnapshot(rev):
3381 addsize(size, semisize)
3337 addsize(size, semisize)
3382 numsemi += 1
3338 numsemi += 1
3383 depth = r.snapshotdepth(rev)
3339 depth = r.snapshotdepth(rev)
3384 numsnapdepth[depth] += 1
3340 numsnapdepth[depth] += 1
3385 addsize(size, snapsizedepth[depth])
3341 addsize(size, snapsizedepth[depth])
3386 else:
3342 else:
3387 addsize(size, deltasize)
3343 addsize(size, deltasize)
3388 if delta == rev - 1:
3344 if delta == rev - 1:
3389 numprev += 1
3345 numprev += 1
3390 if delta == p1:
3346 if delta == p1:
3391 nump1prev += 1
3347 nump1prev += 1
3392 elif delta == p2:
3348 elif delta == p2:
3393 nump2prev += 1
3349 nump2prev += 1
3394 elif delta == p1:
3350 elif delta == p1:
3395 nump1 += 1
3351 nump1 += 1
3396 elif delta == p2:
3352 elif delta == p2:
3397 nump2 += 1
3353 nump2 += 1
3398 elif delta != nullrev:
3354 elif delta != nullrev:
3399 numother += 1
3355 numother += 1
3400
3356
3401 # Obtain data on the raw chunks in the revlog.
3357 # Obtain data on the raw chunks in the revlog.
3402 if util.safehasattr(r, b'_getsegmentforrevs'):
3358 if util.safehasattr(r, b'_getsegmentforrevs'):
3403 segment = r._getsegmentforrevs(rev, rev)[1]
3359 segment = r._getsegmentforrevs(rev, rev)[1]
3404 else:
3360 else:
3405 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3361 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3406 if segment:
3362 if segment:
3407 chunktype = bytes(segment[0:1])
3363 chunktype = bytes(segment[0:1])
3408 else:
3364 else:
3409 chunktype = b'empty'
3365 chunktype = b'empty'
3410
3366
3411 if chunktype not in chunktypecounts:
3367 if chunktype not in chunktypecounts:
3412 chunktypecounts[chunktype] = 0
3368 chunktypecounts[chunktype] = 0
3413 chunktypesizes[chunktype] = 0
3369 chunktypesizes[chunktype] = 0
3414
3370
3415 chunktypecounts[chunktype] += 1
3371 chunktypecounts[chunktype] += 1
3416 chunktypesizes[chunktype] += size
3372 chunktypesizes[chunktype] += size
3417
3373
3418 # Adjust size min value for empty cases
3374 # Adjust size min value for empty cases
3419 for size in (datasize, fullsize, semisize, deltasize):
3375 for size in (datasize, fullsize, semisize, deltasize):
3420 if size[0] is None:
3376 if size[0] is None:
3421 size[0] = 0
3377 size[0] = 0
3422
3378
3423 numdeltas = numrevs - numfull - numempty - numsemi
3379 numdeltas = numrevs - numfull - numempty - numsemi
3424 numoprev = numprev - nump1prev - nump2prev
3380 numoprev = numprev - nump1prev - nump2prev
3425 totalrawsize = datasize[2]
3381 totalrawsize = datasize[2]
3426 datasize[2] /= numrevs
3382 datasize[2] /= numrevs
3427 fulltotal = fullsize[2]
3383 fulltotal = fullsize[2]
3428 if numfull == 0:
3384 if numfull == 0:
3429 fullsize[2] = 0
3385 fullsize[2] = 0
3430 else:
3386 else:
3431 fullsize[2] /= numfull
3387 fullsize[2] /= numfull
3432 semitotal = semisize[2]
3388 semitotal = semisize[2]
3433 snaptotal = {}
3389 snaptotal = {}
3434 if numsemi > 0:
3390 if numsemi > 0:
3435 semisize[2] /= numsemi
3391 semisize[2] /= numsemi
3436 for depth in snapsizedepth:
3392 for depth in snapsizedepth:
3437 snaptotal[depth] = snapsizedepth[depth][2]
3393 snaptotal[depth] = snapsizedepth[depth][2]
3438 snapsizedepth[depth][2] /= numsnapdepth[depth]
3394 snapsizedepth[depth][2] /= numsnapdepth[depth]
3439
3395
3440 deltatotal = deltasize[2]
3396 deltatotal = deltasize[2]
3441 if numdeltas > 0:
3397 if numdeltas > 0:
3442 deltasize[2] /= numdeltas
3398 deltasize[2] /= numdeltas
3443 totalsize = fulltotal + semitotal + deltatotal
3399 totalsize = fulltotal + semitotal + deltatotal
3444 avgchainlen = sum(chainlengths) / numrevs
3400 avgchainlen = sum(chainlengths) / numrevs
3445 maxchainlen = max(chainlengths)
3401 maxchainlen = max(chainlengths)
3446 maxchainspan = max(chainspans)
3402 maxchainspan = max(chainspans)
3447 compratio = 1
3403 compratio = 1
3448 if totalsize:
3404 if totalsize:
3449 compratio = totalrawsize / totalsize
3405 compratio = totalrawsize / totalsize
3450
3406
3451 basedfmtstr = b'%%%dd\n'
3407 basedfmtstr = b'%%%dd\n'
3452 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3408 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3453
3409
3454 def dfmtstr(max):
3410 def dfmtstr(max):
3455 return basedfmtstr % len(str(max))
3411 return basedfmtstr % len(str(max))
3456
3412
3457 def pcfmtstr(max, padding=0):
3413 def pcfmtstr(max, padding=0):
3458 return basepcfmtstr % (len(str(max)), b' ' * padding)
3414 return basepcfmtstr % (len(str(max)), b' ' * padding)
3459
3415
3460 def pcfmt(value, total):
3416 def pcfmt(value, total):
3461 if total:
3417 if total:
3462 return (value, 100 * float(value) / total)
3418 return (value, 100 * float(value) / total)
3463 else:
3419 else:
3464 return value, 100.0
3420 return value, 100.0
3465
3421
3466 ui.writenoi18n(b'format : %d\n' % format)
3422 ui.writenoi18n(b'format : %d\n' % format)
3467 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3423 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3468
3424
3469 ui.write(b'\n')
3425 ui.write(b'\n')
3470 fmt = pcfmtstr(totalsize)
3426 fmt = pcfmtstr(totalsize)
3471 fmt2 = dfmtstr(totalsize)
3427 fmt2 = dfmtstr(totalsize)
3472 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3428 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3473 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3429 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3474 ui.writenoi18n(
3430 ui.writenoi18n(
3475 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3431 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3476 )
3432 )
3477 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3433 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3478 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3434 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3479 ui.writenoi18n(
3435 ui.writenoi18n(
3480 b' text : '
3436 b' text : '
3481 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3437 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3482 )
3438 )
3483 ui.writenoi18n(
3439 ui.writenoi18n(
3484 b' delta : '
3440 b' delta : '
3485 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3441 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3486 )
3442 )
3487 ui.writenoi18n(
3443 ui.writenoi18n(
3488 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3444 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3489 )
3445 )
3490 for depth in sorted(numsnapdepth):
3446 for depth in sorted(numsnapdepth):
3491 ui.write(
3447 ui.write(
3492 (b' lvl-%-3d : ' % depth)
3448 (b' lvl-%-3d : ' % depth)
3493 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3449 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3494 )
3450 )
3495 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3451 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3496 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3452 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3497 ui.writenoi18n(
3453 ui.writenoi18n(
3498 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3454 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3499 )
3455 )
3500 for depth in sorted(numsnapdepth):
3456 for depth in sorted(numsnapdepth):
3501 ui.write(
3457 ui.write(
3502 (b' lvl-%-3d : ' % depth)
3458 (b' lvl-%-3d : ' % depth)
3503 + fmt % pcfmt(snaptotal[depth], totalsize)
3459 + fmt % pcfmt(snaptotal[depth], totalsize)
3504 )
3460 )
3505 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3461 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3506
3462
3507 def fmtchunktype(chunktype):
3463 def fmtchunktype(chunktype):
3508 if chunktype == b'empty':
3464 if chunktype == b'empty':
3509 return b' %s : ' % chunktype
3465 return b' %s : ' % chunktype
3510 elif chunktype in pycompat.bytestr(string.ascii_letters):
3466 elif chunktype in pycompat.bytestr(string.ascii_letters):
3511 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3467 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3512 else:
3468 else:
3513 return b' 0x%s : ' % hex(chunktype)
3469 return b' 0x%s : ' % hex(chunktype)
3514
3470
3515 ui.write(b'\n')
3471 ui.write(b'\n')
3516 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3472 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3517 for chunktype in sorted(chunktypecounts):
3473 for chunktype in sorted(chunktypecounts):
3518 ui.write(fmtchunktype(chunktype))
3474 ui.write(fmtchunktype(chunktype))
3519 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3475 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3520 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3476 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3521 for chunktype in sorted(chunktypecounts):
3477 for chunktype in sorted(chunktypecounts):
3522 ui.write(fmtchunktype(chunktype))
3478 ui.write(fmtchunktype(chunktype))
3523 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3479 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3524
3480
3525 ui.write(b'\n')
3481 ui.write(b'\n')
3526 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3482 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3527 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3483 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3528 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3484 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3529 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3485 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3530 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3486 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3531
3487
3532 if format > 0:
3488 if format > 0:
3533 ui.write(b'\n')
3489 ui.write(b'\n')
3534 ui.writenoi18n(
3490 ui.writenoi18n(
3535 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3491 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3536 % tuple(datasize)
3492 % tuple(datasize)
3537 )
3493 )
3538 ui.writenoi18n(
3494 ui.writenoi18n(
3539 b'full revision size (min/max/avg) : %d / %d / %d\n'
3495 b'full revision size (min/max/avg) : %d / %d / %d\n'
3540 % tuple(fullsize)
3496 % tuple(fullsize)
3541 )
3497 )
3542 ui.writenoi18n(
3498 ui.writenoi18n(
3543 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3499 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3544 % tuple(semisize)
3500 % tuple(semisize)
3545 )
3501 )
3546 for depth in sorted(snapsizedepth):
3502 for depth in sorted(snapsizedepth):
3547 if depth == 0:
3503 if depth == 0:
3548 continue
3504 continue
3549 ui.writenoi18n(
3505 ui.writenoi18n(
3550 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3506 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3551 % ((depth,) + tuple(snapsizedepth[depth]))
3507 % ((depth,) + tuple(snapsizedepth[depth]))
3552 )
3508 )
3553 ui.writenoi18n(
3509 ui.writenoi18n(
3554 b'delta size (min/max/avg) : %d / %d / %d\n'
3510 b'delta size (min/max/avg) : %d / %d / %d\n'
3555 % tuple(deltasize)
3511 % tuple(deltasize)
3556 )
3512 )
3557
3513
3558 if numdeltas > 0:
3514 if numdeltas > 0:
3559 ui.write(b'\n')
3515 ui.write(b'\n')
3560 fmt = pcfmtstr(numdeltas)
3516 fmt = pcfmtstr(numdeltas)
3561 fmt2 = pcfmtstr(numdeltas, 4)
3517 fmt2 = pcfmtstr(numdeltas, 4)
3562 ui.writenoi18n(
3518 ui.writenoi18n(
3563 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3519 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3564 )
3520 )
3565 if numprev > 0:
3521 if numprev > 0:
3566 ui.writenoi18n(
3522 ui.writenoi18n(
3567 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3523 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3568 )
3524 )
3569 ui.writenoi18n(
3525 ui.writenoi18n(
3570 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3526 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3571 )
3527 )
3572 ui.writenoi18n(
3528 ui.writenoi18n(
3573 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3529 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3574 )
3530 )
3575 if gdelta:
3531 if gdelta:
3576 ui.writenoi18n(
3532 ui.writenoi18n(
3577 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3533 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3578 )
3534 )
3579 ui.writenoi18n(
3535 ui.writenoi18n(
3580 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3536 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3581 )
3537 )
3582 ui.writenoi18n(
3538 ui.writenoi18n(
3583 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3539 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3584 )
3540 )
3585
3541
3586
3542
3587 @command(
3543 @command(
3588 b'debugrevlogindex',
3544 b'debugrevlogindex',
3589 cmdutil.debugrevlogopts
3545 cmdutil.debugrevlogopts
3590 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
3546 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
3591 _(b'[-f FORMAT] -c|-m|FILE'),
3547 _(b'[-f FORMAT] -c|-m|FILE'),
3592 optionalrepo=True,
3548 optionalrepo=True,
3593 )
3549 )
3594 def debugrevlogindex(ui, repo, file_=None, **opts):
3550 def debugrevlogindex(ui, repo, file_=None, **opts):
3595 """dump the contents of a revlog index"""
3551 """dump the contents of a revlog index"""
3596 opts = pycompat.byteskwargs(opts)
3552 opts = pycompat.byteskwargs(opts)
3597 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
3553 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
3598 format = opts.get(b'format', 0)
3554 format = opts.get(b'format', 0)
3599 if format not in (0, 1):
3555 if format not in (0, 1):
3600 raise error.Abort(_(b"unknown format %d") % format)
3556 raise error.Abort(_(b"unknown format %d") % format)
3601
3557
3602 if ui.debugflag:
3558 if ui.debugflag:
3603 shortfn = hex
3559 shortfn = hex
3604 else:
3560 else:
3605 shortfn = short
3561 shortfn = short
3606
3562
3607 # There might not be anything in r, so have a sane default
3563 # There might not be anything in r, so have a sane default
3608 idlen = 12
3564 idlen = 12
3609 for i in r:
3565 for i in r:
3610 idlen = len(shortfn(r.node(i)))
3566 idlen = len(shortfn(r.node(i)))
3611 break
3567 break
3612
3568
3613 if format == 0:
3569 if format == 0:
3614 if ui.verbose:
3570 if ui.verbose:
3615 ui.writenoi18n(
3571 ui.writenoi18n(
3616 b" rev offset length linkrev %s %s p2\n"
3572 b" rev offset length linkrev %s %s p2\n"
3617 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3573 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3618 )
3574 )
3619 else:
3575 else:
3620 ui.writenoi18n(
3576 ui.writenoi18n(
3621 b" rev linkrev %s %s p2\n"
3577 b" rev linkrev %s %s p2\n"
3622 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3578 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3623 )
3579 )
3624 elif format == 1:
3580 elif format == 1:
3625 if ui.verbose:
3581 if ui.verbose:
3626 ui.writenoi18n(
3582 ui.writenoi18n(
3627 (
3583 (
3628 b" rev flag offset length size link p1"
3584 b" rev flag offset length size link p1"
3629 b" p2 %s\n"
3585 b" p2 %s\n"
3630 )
3586 )
3631 % b"nodeid".rjust(idlen)
3587 % b"nodeid".rjust(idlen)
3632 )
3588 )
3633 else:
3589 else:
3634 ui.writenoi18n(
3590 ui.writenoi18n(
3635 b" rev flag size link p1 p2 %s\n"
3591 b" rev flag size link p1 p2 %s\n"
3636 % b"nodeid".rjust(idlen)
3592 % b"nodeid".rjust(idlen)
3637 )
3593 )
3638
3594
3639 for i in r:
3595 for i in r:
3640 node = r.node(i)
3596 node = r.node(i)
3641 if format == 0:
3597 if format == 0:
3642 try:
3598 try:
3643 pp = r.parents(node)
3599 pp = r.parents(node)
3644 except Exception:
3600 except Exception:
3645 pp = [repo.nullid, repo.nullid]
3601 pp = [repo.nullid, repo.nullid]
3646 if ui.verbose:
3602 if ui.verbose:
3647 ui.write(
3603 ui.write(
3648 b"% 6d % 9d % 7d % 7d %s %s %s\n"
3604 b"% 6d % 9d % 7d % 7d %s %s %s\n"
3649 % (
3605 % (
3650 i,
3606 i,
3651 r.start(i),
3607 r.start(i),
3652 r.length(i),
3608 r.length(i),
3653 r.linkrev(i),
3609 r.linkrev(i),
3654 shortfn(node),
3610 shortfn(node),
3655 shortfn(pp[0]),
3611 shortfn(pp[0]),
3656 shortfn(pp[1]),
3612 shortfn(pp[1]),
3657 )
3613 )
3658 )
3614 )
3659 else:
3615 else:
3660 ui.write(
3616 ui.write(
3661 b"% 6d % 7d %s %s %s\n"
3617 b"% 6d % 7d %s %s %s\n"
3662 % (
3618 % (
3663 i,
3619 i,
3664 r.linkrev(i),
3620 r.linkrev(i),
3665 shortfn(node),
3621 shortfn(node),
3666 shortfn(pp[0]),
3622 shortfn(pp[0]),
3667 shortfn(pp[1]),
3623 shortfn(pp[1]),
3668 )
3624 )
3669 )
3625 )
3670 elif format == 1:
3626 elif format == 1:
3671 pr = r.parentrevs(i)
3627 pr = r.parentrevs(i)
3672 if ui.verbose:
3628 if ui.verbose:
3673 ui.write(
3629 ui.write(
3674 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3630 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3675 % (
3631 % (
3676 i,
3632 i,
3677 r.flags(i),
3633 r.flags(i),
3678 r.start(i),
3634 r.start(i),
3679 r.length(i),
3635 r.length(i),
3680 r.rawsize(i),
3636 r.rawsize(i),
3681 r.linkrev(i),
3637 r.linkrev(i),
3682 pr[0],
3638 pr[0],
3683 pr[1],
3639 pr[1],
3684 shortfn(node),
3640 shortfn(node),
3685 )
3641 )
3686 )
3642 )
3687 else:
3643 else:
3688 ui.write(
3644 ui.write(
3689 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3645 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3690 % (
3646 % (
3691 i,
3647 i,
3692 r.flags(i),
3648 r.flags(i),
3693 r.rawsize(i),
3649 r.rawsize(i),
3694 r.linkrev(i),
3650 r.linkrev(i),
3695 pr[0],
3651 pr[0],
3696 pr[1],
3652 pr[1],
3697 shortfn(node),
3653 shortfn(node),
3698 )
3654 )
3699 )
3655 )
3700
3656
3701
3657
3702 @command(
3658 @command(
3703 b'debugrevspec',
3659 b'debugrevspec',
3704 [
3660 [
3705 (
3661 (
3706 b'',
3662 b'',
3707 b'optimize',
3663 b'optimize',
3708 None,
3664 None,
3709 _(b'print parsed tree after optimizing (DEPRECATED)'),
3665 _(b'print parsed tree after optimizing (DEPRECATED)'),
3710 ),
3666 ),
3711 (
3667 (
3712 b'',
3668 b'',
3713 b'show-revs',
3669 b'show-revs',
3714 True,
3670 True,
3715 _(b'print list of result revisions (default)'),
3671 _(b'print list of result revisions (default)'),
3716 ),
3672 ),
3717 (
3673 (
3718 b's',
3674 b's',
3719 b'show-set',
3675 b'show-set',
3720 None,
3676 None,
3721 _(b'print internal representation of result set'),
3677 _(b'print internal representation of result set'),
3722 ),
3678 ),
3723 (
3679 (
3724 b'p',
3680 b'p',
3725 b'show-stage',
3681 b'show-stage',
3726 [],
3682 [],
3727 _(b'print parsed tree at the given stage'),
3683 _(b'print parsed tree at the given stage'),
3728 _(b'NAME'),
3684 _(b'NAME'),
3729 ),
3685 ),
3730 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3686 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3731 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3687 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3732 ],
3688 ],
3733 b'REVSPEC',
3689 b'REVSPEC',
3734 )
3690 )
3735 def debugrevspec(ui, repo, expr, **opts):
3691 def debugrevspec(ui, repo, expr, **opts):
3736 """parse and apply a revision specification
3692 """parse and apply a revision specification
3737
3693
3738 Use -p/--show-stage option to print the parsed tree at the given stages.
3694 Use -p/--show-stage option to print the parsed tree at the given stages.
3739 Use -p all to print tree at every stage.
3695 Use -p all to print tree at every stage.
3740
3696
3741 Use --no-show-revs option with -s or -p to print only the set
3697 Use --no-show-revs option with -s or -p to print only the set
3742 representation or the parsed tree respectively.
3698 representation or the parsed tree respectively.
3743
3699
3744 Use --verify-optimized to compare the optimized result with the unoptimized
3700 Use --verify-optimized to compare the optimized result with the unoptimized
3745 one. Returns 1 if the optimized result differs.
3701 one. Returns 1 if the optimized result differs.
3746 """
3702 """
3747 opts = pycompat.byteskwargs(opts)
3703 opts = pycompat.byteskwargs(opts)
3748 aliases = ui.configitems(b'revsetalias')
3704 aliases = ui.configitems(b'revsetalias')
3749 stages = [
3705 stages = [
3750 (b'parsed', lambda tree: tree),
3706 (b'parsed', lambda tree: tree),
3751 (
3707 (
3752 b'expanded',
3708 b'expanded',
3753 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3709 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3754 ),
3710 ),
3755 (b'concatenated', revsetlang.foldconcat),
3711 (b'concatenated', revsetlang.foldconcat),
3756 (b'analyzed', revsetlang.analyze),
3712 (b'analyzed', revsetlang.analyze),
3757 (b'optimized', revsetlang.optimize),
3713 (b'optimized', revsetlang.optimize),
3758 ]
3714 ]
3759 if opts[b'no_optimized']:
3715 if opts[b'no_optimized']:
3760 stages = stages[:-1]
3716 stages = stages[:-1]
3761 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3717 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3762 raise error.Abort(
3718 raise error.Abort(
3763 _(b'cannot use --verify-optimized with --no-optimized')
3719 _(b'cannot use --verify-optimized with --no-optimized')
3764 )
3720 )
3765 stagenames = {n for n, f in stages}
3721 stagenames = {n for n, f in stages}
3766
3722
3767 showalways = set()
3723 showalways = set()
3768 showchanged = set()
3724 showchanged = set()
3769 if ui.verbose and not opts[b'show_stage']:
3725 if ui.verbose and not opts[b'show_stage']:
3770 # show parsed tree by --verbose (deprecated)
3726 # show parsed tree by --verbose (deprecated)
3771 showalways.add(b'parsed')
3727 showalways.add(b'parsed')
3772 showchanged.update([b'expanded', b'concatenated'])
3728 showchanged.update([b'expanded', b'concatenated'])
3773 if opts[b'optimize']:
3729 if opts[b'optimize']:
3774 showalways.add(b'optimized')
3730 showalways.add(b'optimized')
3775 if opts[b'show_stage'] and opts[b'optimize']:
3731 if opts[b'show_stage'] and opts[b'optimize']:
3776 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3732 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3777 if opts[b'show_stage'] == [b'all']:
3733 if opts[b'show_stage'] == [b'all']:
3778 showalways.update(stagenames)
3734 showalways.update(stagenames)
3779 else:
3735 else:
3780 for n in opts[b'show_stage']:
3736 for n in opts[b'show_stage']:
3781 if n not in stagenames:
3737 if n not in stagenames:
3782 raise error.Abort(_(b'invalid stage name: %s') % n)
3738 raise error.Abort(_(b'invalid stage name: %s') % n)
3783 showalways.update(opts[b'show_stage'])
3739 showalways.update(opts[b'show_stage'])
3784
3740
3785 treebystage = {}
3741 treebystage = {}
3786 printedtree = None
3742 printedtree = None
3787 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3743 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3788 for n, f in stages:
3744 for n, f in stages:
3789 treebystage[n] = tree = f(tree)
3745 treebystage[n] = tree = f(tree)
3790 if n in showalways or (n in showchanged and tree != printedtree):
3746 if n in showalways or (n in showchanged and tree != printedtree):
3791 if opts[b'show_stage'] or n != b'parsed':
3747 if opts[b'show_stage'] or n != b'parsed':
3792 ui.write(b"* %s:\n" % n)
3748 ui.write(b"* %s:\n" % n)
3793 ui.write(revsetlang.prettyformat(tree), b"\n")
3749 ui.write(revsetlang.prettyformat(tree), b"\n")
3794 printedtree = tree
3750 printedtree = tree
3795
3751
3796 if opts[b'verify_optimized']:
3752 if opts[b'verify_optimized']:
3797 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3753 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3798 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3754 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3799 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3755 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3800 ui.writenoi18n(
3756 ui.writenoi18n(
3801 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3757 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3802 )
3758 )
3803 ui.writenoi18n(
3759 ui.writenoi18n(
3804 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3760 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3805 )
3761 )
3806 arevs = list(arevs)
3762 arevs = list(arevs)
3807 brevs = list(brevs)
3763 brevs = list(brevs)
3808 if arevs == brevs:
3764 if arevs == brevs:
3809 return 0
3765 return 0
3810 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3766 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3811 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3767 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3812 sm = difflib.SequenceMatcher(None, arevs, brevs)
3768 sm = difflib.SequenceMatcher(None, arevs, brevs)
3813 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3769 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3814 if tag in ('delete', 'replace'):
3770 if tag in ('delete', 'replace'):
3815 for c in arevs[alo:ahi]:
3771 for c in arevs[alo:ahi]:
3816 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3772 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3817 if tag in ('insert', 'replace'):
3773 if tag in ('insert', 'replace'):
3818 for c in brevs[blo:bhi]:
3774 for c in brevs[blo:bhi]:
3819 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3775 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3820 if tag == 'equal':
3776 if tag == 'equal':
3821 for c in arevs[alo:ahi]:
3777 for c in arevs[alo:ahi]:
3822 ui.write(b' %d\n' % c)
3778 ui.write(b' %d\n' % c)
3823 return 1
3779 return 1
3824
3780
3825 func = revset.makematcher(tree)
3781 func = revset.makematcher(tree)
3826 revs = func(repo)
3782 revs = func(repo)
3827 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3783 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3828 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3784 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3829 if not opts[b'show_revs']:
3785 if not opts[b'show_revs']:
3830 return
3786 return
3831 for c in revs:
3787 for c in revs:
3832 ui.write(b"%d\n" % c)
3788 ui.write(b"%d\n" % c)
3833
3789
3834
3790
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    # Only the SSH wire protocol bound to this process's stdio is
    # implemented here; other serve modes go through `hg serve`.
    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    logfh = None

    # --logiofd and --logiofile are two mutually exclusive destinations
    # for the wire-protocol I/O log.
    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    if opts[b'logiofd']:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        try:
            logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
    elif opts[b'logiofile']:
        # NOTE: `open` here is mercurial's pycompat open (imported at the
        # top of this module), which accepts a bytes mode string.
        logfh = open(opts[b'logiofile'], b'ab', 0)

    # Hand the log handle to the SSH server; serve_forever() blocks until
    # the client side of the connection goes away.
    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()
3883
3839
3884
3840
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, and does not
    touch anything else. This is useful for writing repository conversion
    tools, but should be used with extreme care. For example, neither the
    working directory nor the dirstate is updated, so file status may be
    incorrect after running this command. Only use it if you are one of the
    few people that deeply understand both conversion tools and file level
    histories. If you are reading this help, you are not one of these people
    (most of them sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    # Resolve both revisions to nodes; the b'null' default makes the second
    # parent optional (omitted REV2 means "no second parent").
    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    # Only the dirstate parents are rewritten; the working directory
    # contents are deliberately left untouched (see docstring warning).
    with repo.wlock():
        repo.setparents(node1, node2)
3912
3868
3913
3869
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the first positional argument is really the revision;
    # with a FILE argument an explicit REV is mandatory.
    #
    # Fix: the usage/error messages previously named b'debugdata' (a
    # copy-paste leftover from the debugdata command); report the actual
    # command name instead.
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
    # Open the requested storage (changelog/manifest/filelog) and unwrap to
    # the underlying revlog when one exists.
    r = cmdutil.openstorage(repo, b'debugsidedata', file_, opts)
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        # Deterministic output: entries sorted by key.
        sidedata = sorted(sidedata.items())
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3940
3896
3941
3897
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = urlutil.get_unique_pull_path(
        b'debugssl', repo, ui, source
    )
    url = urlutil.url(source)

    # Resolve host/port; only schemes with a TLS-capable endpoint make sense.
    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # Fix: the module-level ssl.wrap_socket() was deprecated in Python 3.7
    # and removed in 3.12. Build an explicit client context instead. We
    # intentionally disable verification: the point is to fetch the peer's
    # certificate chain, not to validate it.
    sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    sslcontext.check_hostname = False
    sslcontext.verify_mode = ssl.CERT_NONE

    s = sslcontext.wrap_socket(socket.socket())

    try:
        s.connect(addr)
        # binary_form=True: we need the DER-encoded certificate for win32.
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        # First pass only checks; a second pass (build=True) asks Windows
        # Update to fetch missing intermediates/roots.
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
4013
3969
4014
3970
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Backup bundles are the *.hg files written under .hg/strip-backup
    # (by strip/histedit/etc.); list them newest first.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    # The incoming/getremotechanges machinery below expects these keys.
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Show up to `limit` changesets from chlist, honoring the
        # --newest-first and --no-merges log options.
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        # Nothing to do if the requested changeset is already present.
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        source, branches = urlutil.get_unique_pull_path(
            b'debugbackupbundle',
            repo,
            ui,
            source,
            default_branches=opts.get(b'branch'),
        )
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            # Bundle references a parent we don't have; skip it but tell
            # the user why.
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # Silence the noisy bundle-repo setup; errors are handled below.
        with ui.silent():
            try:
                other, chlist, cleanupfn = bundlerepo.getremotechanges(
                    ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
                )
            except error.LookupError:
                continue

        try:
            if not chlist:
                continue
            if recovernode:
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        # Only the first bundle containing the node is
                        # applied; stop scanning.
                        break
            else:
                # Listing mode: print the backup's mtime, then either the
                # bundle path (--verbose) or a one-line log per changeset.
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                    displayer = logcmdutil.changesetdisplayer(
                        ui, other, opts, False
                    )
                    display(other, chlist, displayer)
                    displayer.close()
        finally:
            # getremotechanges created temporary state for the bundle repo;
            # always tear it down, even on break/continue.
            cleanupfn()
4155
4111
4156
4112
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Dump the subrepository state (source and pinned revision) recorded
    # in the given changeset, sorted by subrepo path.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
4168
4124
4169
4125
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    import code

    # Hand the standard debugging handles straight to the REPL.
    code.interact(local={'ui': ui, 'repo': repo})
4185
4141
4186
4142
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    for rev in logcmdutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % bytes(ctx))
        successors = obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        )
        for succsset in successors:
            if succsset:
                # one indented line per successors set, nodes space-separated
                ui.write(b' %s\n' % b' '.join(short(n) for n in succsset))
            else:
                ui.write(b'\n')
4241
4197
4242
4198
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    flog = repo.file(b'.hgtags')

    def describe(fnode):
        # Three cache states: a recorded .hgtags filenode, a known-absent
        # entry (None), or a falsy non-None value treated as corrupt.
        if fnode is None:
            return b'missing'
        if not fnode:
            return b'invalid'
        text = hex(fnode)
        if not flog.hasnode(fnode):
            text += b' (unknown node)'
        return text

    for r in repo:
        node = repo[r].node()
        fnode = cache.getfnode(node, computemissing=False)
        ui.write(b'%d %s %s\n' % (r, hex(node), describe(fnode)))
4261
4217
4262
4218
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    # -r/--rev needs a repository to resolve revisions; the command itself
    # is optionalrepo so a plain template can be rendered anywhere.
    if opts['rev']:
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = logcmdutil.revrange(repo, opts['rev'])

    # -D KEY=VALUE pairs become template properties; 'ui' is reserved and
    # an empty key is rejected.
    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        # Show the raw parse tree, and the alias-expanded tree when
        # [templatealias] rewrites actually changed it.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic template: render once with the given properties.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested changeset.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
4326
4282
4327
4283
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    response = ui.getpass(prompt)
    # ui.getpass() may yield None; substitute a placeholder so the echoed
    # line is always well-formed.
    shown = b"<default response>" if response is None else response
    ui.writenoi18n(b'response: %s\n' % shown)
4342
4298
4343
4299
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo back whatever ui.prompt() returns for the given prompt text.
    ui.writenoi18n(b'response: %s\n' % ui.prompt(prompt))
4356
4312
4357
4313
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Take both locks (wlock before lock, matching the original ordering)
    # for the whole cache-warming pass.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(caches=repository.CACHES_ALL)
4363
4319
4364
4320
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # De-duplicate the requested optimizations before delegating the real
    # work to the upgrade machinery.
    optimizations = set(optimize)
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=optimizations, backup=backup, **opts
    )
4414
4370
4415
4371
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    items = list(repo[None].walk(m))
    if not items:
        # Nothing matched; print nothing (not even the header line).
        return
    # When ui.slash is set on platforms whose separator isn't '/', display
    # normalized ('/'-separated) paths; otherwise show them unchanged.
    # (Previously this was a redundant lambda wrapper around util.normpath.)
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        display = util.normpath
    else:
        display = lambda fn: fn
    # Size the columns to the longest repo-relative and cwd-relative paths so
    # the table lines up.  The loop variable was renamed from 'abs' (which
    # shadowed the abs() builtin) and max() now consumes generators instead of
    # building throwaway lists.
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(fname) for fname in items),
        max(len(repo.pathto(fname)) for fname in items),
    )
    for fname in items:
        line = fmt % (
            fname,
            display(repo.pathto(fname)),
            # b'exact' marks files matched literally rather than by pattern.
            b'exact' if m.exact(fname) else b'',
        )
        ui.write(b"%s\n" % line.rstrip())
4442
4398
4443
4399
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    # One entry per instability reported by obsutil; each is printed on its
    # own line, optionally prefixed with the divergent nodes involved.
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        divergent = entry.get(b'divergentnodes')
        if divergent:
            descriptions = [
                b'%s (%s)' % (c.hex(), c.phasestr()) for c in divergent
            ]
            dnodes = b' '.join(descriptions) + b' '
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4461
4417
4462
4418
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    # Exercise argument marshalling over the wire protocol against a peer.
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    try:
        # The remote-connection options were consumed by hg.peer() above and
        # must not be forwarded to the remote debugwireargs command.
        for opt in cmdutil.remoteopts:
            del opts[opt[1]]
        # Only forward options that were actually given a value.
        args = {k: v for k, v in opts.items() if v}
        args = pycompat.strkwargs(args)
        # run twice to check that we don't mess up the stream for the next command
        res1 = repo.debugwireargs(*vals, **args)
        res2 = repo.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % res1)
        if res1 != res2:
            # A mismatch means the first call corrupted the protocol stream.
            ui.warn(b"%s\n" % res2)
    finally:
        repo.close()
4493
4449
4494
4450
4495 def _parsewirelangblocks(fh):
4451 def _parsewirelangblocks(fh):
4496 activeaction = None
4452 activeaction = None
4497 blocklines = []
4453 blocklines = []
4498 lastindent = 0
4454 lastindent = 0
4499
4455
4500 for line in fh:
4456 for line in fh:
4501 line = line.rstrip()
4457 line = line.rstrip()
4502 if not line:
4458 if not line:
4503 continue
4459 continue
4504
4460
4505 if line.startswith(b'#'):
4461 if line.startswith(b'#'):
4506 continue
4462 continue
4507
4463
4508 if not line.startswith(b' '):
4464 if not line.startswith(b' '):
4509 # New block. Flush previous one.
4465 # New block. Flush previous one.
4510 if activeaction:
4466 if activeaction:
4511 yield activeaction, blocklines
4467 yield activeaction, blocklines
4512
4468
4513 activeaction = line
4469 activeaction = line
4514 blocklines = []
4470 blocklines = []
4515 lastindent = 0
4471 lastindent = 0
4516 continue
4472 continue
4517
4473
4518 # Else we start with an indent.
4474 # Else we start with an indent.
4519
4475
4520 if not activeaction:
4476 if not activeaction:
4521 raise error.Abort(_(b'indented line outside of block'))
4477 raise error.Abort(_(b'indented line outside of block'))
4522
4478
4523 indent = len(line) - len(line.lstrip())
4479 indent = len(line) - len(line.lstrip())
4524
4480
4525 # If this line is indented more than the last line, concatenate it.
4481 # If this line is indented more than the last line, concatenate it.
4526 if indent > lastindent and blocklines:
4482 if indent > lastindent and blocklines:
4527 blocklines[-1] += line.lstrip()
4483 blocklines[-1] += line.lstrip()
4528 else:
4484 else:
4529 blocklines.append(line)
4485 blocklines.append(line)
4530 lastindent = indent
4486 lastindent = indent
4531
4487
4532 # Flush last block.
4488 # Flush last block.
4533 if activeaction:
4489 if activeaction:
4534 yield activeaction, blocklines
4490 yield activeaction, blocklines
4535
4491
4536
4492
4537 @command(
4493 @command(
4538 b'debugwireproto',
4494 b'debugwireproto',
4539 [
4495 [
4540 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4496 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4541 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4497 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4542 (
4498 (
4543 b'',
4499 b'',
4544 b'noreadstderr',
4500 b'noreadstderr',
4545 False,
4501 False,
4546 _(b'do not read from stderr of the remote'),
4502 _(b'do not read from stderr of the remote'),
4547 ),
4503 ),
4548 (
4504 (
4549 b'',
4505 b'',
4550 b'nologhandshake',
4506 b'nologhandshake',
4551 False,
4507 False,
4552 _(b'do not log I/O related to the peer handshake'),
4508 _(b'do not log I/O related to the peer handshake'),
4553 ),
4509 ),
4554 ]
4510 ]
4555 + cmdutil.remoteopts,
4511 + cmdutil.remoteopts,
4556 _(b'[PATH]'),
4512 _(b'[PATH]'),
4557 optionalrepo=True,
4513 optionalrepo=True,
4558 )
4514 )
4559 def debugwireproto(ui, repo, path=None, **opts):
4515 def debugwireproto(ui, repo, path=None, **opts):
4560 """send wire protocol commands to a server
4516 """send wire protocol commands to a server
4561
4517
4562 This command can be used to issue wire protocol commands to remote
4518 This command can be used to issue wire protocol commands to remote
4563 peers and to debug the raw data being exchanged.
4519 peers and to debug the raw data being exchanged.
4564
4520
4565 ``--localssh`` will start an SSH server against the current repository
4521 ``--localssh`` will start an SSH server against the current repository
4566 and connect to that. By default, the connection will perform a handshake
4522 and connect to that. By default, the connection will perform a handshake
4567 and establish an appropriate peer instance.
4523 and establish an appropriate peer instance.
4568
4524
4569 ``--peer`` can be used to bypass the handshake protocol and construct a
4525 ``--peer`` can be used to bypass the handshake protocol and construct a
4570 peer instance using the specified class type. Valid values are ``raw``,
4526 peer instance using the specified class type. Valid values are ``raw``,
4571 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4527 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4572 don't support higher-level command actions.
4528 don't support higher-level command actions.
4573
4529
4574 ``--noreadstderr`` can be used to disable automatic reading from stderr
4530 ``--noreadstderr`` can be used to disable automatic reading from stderr
4575 of the peer (for SSH connections only). Disabling automatic reading of
4531 of the peer (for SSH connections only). Disabling automatic reading of
4576 stderr is useful for making output more deterministic.
4532 stderr is useful for making output more deterministic.
4577
4533
4578 Commands are issued via a mini language which is specified via stdin.
4534 Commands are issued via a mini language which is specified via stdin.
4579 The language consists of individual actions to perform. An action is
4535 The language consists of individual actions to perform. An action is
4580 defined by a block. A block is defined as a line with no leading
4536 defined by a block. A block is defined as a line with no leading
4581 space followed by 0 or more lines with leading space. Blocks are
4537 space followed by 0 or more lines with leading space. Blocks are
4582 effectively a high-level command with additional metadata.
4538 effectively a high-level command with additional metadata.
4583
4539
4584 Lines beginning with ``#`` are ignored.
4540 Lines beginning with ``#`` are ignored.
4585
4541
4586 The following sections denote available actions.
4542 The following sections denote available actions.
4587
4543
4588 raw
4544 raw
4589 ---
4545 ---
4590
4546
4591 Send raw data to the server.
4547 Send raw data to the server.
4592
4548
4593 The block payload contains the raw data to send as one atomic send
4549 The block payload contains the raw data to send as one atomic send
4594 operation. The data may not actually be delivered in a single system
4550 operation. The data may not actually be delivered in a single system
4595 call: it depends on the abilities of the transport being used.
4551 call: it depends on the abilities of the transport being used.
4596
4552
4597 Each line in the block is de-indented and concatenated. Then, that
4553 Each line in the block is de-indented and concatenated. Then, that
4598 value is evaluated as a Python b'' literal. This allows the use of
4554 value is evaluated as a Python b'' literal. This allows the use of
4599 backslash escaping, etc.
4555 backslash escaping, etc.
4600
4556
4601 raw+
4557 raw+
4602 ----
4558 ----
4603
4559
4604 Behaves like ``raw`` except flushes output afterwards.
4560 Behaves like ``raw`` except flushes output afterwards.
4605
4561
4606 command <X>
4562 command <X>
4607 -----------
4563 -----------
4608
4564
4609 Send a request to run a named command, whose name follows the ``command``
4565 Send a request to run a named command, whose name follows the ``command``
4610 string.
4566 string.
4611
4567
4612 Arguments to the command are defined as lines in this block. The format of
4568 Arguments to the command are defined as lines in this block. The format of
4613 each line is ``<key> <value>``. e.g.::
4569 each line is ``<key> <value>``. e.g.::
4614
4570
4615 command listkeys
4571 command listkeys
4616 namespace bookmarks
4572 namespace bookmarks
4617
4573
4618 If the value begins with ``eval:``, it will be interpreted as a Python
4574 If the value begins with ``eval:``, it will be interpreted as a Python
4619 literal expression. Otherwise values are interpreted as Python b'' literals.
4575 literal expression. Otherwise values are interpreted as Python b'' literals.
4620 This allows sending complex types and encoding special byte sequences via
4576 This allows sending complex types and encoding special byte sequences via
4621 backslash escaping.
4577 backslash escaping.
4622
4578
4623 The following arguments have special meaning:
4579 The following arguments have special meaning:
4624
4580
4625 ``PUSHFILE``
4581 ``PUSHFILE``
4626 When defined, the *push* mechanism of the peer will be used instead
4582 When defined, the *push* mechanism of the peer will be used instead
4627 of the static request-response mechanism and the content of the
4583 of the static request-response mechanism and the content of the
4628 file specified in the value of this argument will be sent as the
4584 file specified in the value of this argument will be sent as the
4629 command payload.
4585 command payload.
4630
4586
4631 This can be used to submit a local bundle file to the remote.
4587 This can be used to submit a local bundle file to the remote.
4632
4588
4633 batchbegin
4589 batchbegin
4634 ----------
4590 ----------
4635
4591
4636 Instruct the peer to begin a batched send.
4592 Instruct the peer to begin a batched send.
4637
4593
4638 All ``command`` blocks are queued for execution until the next
4594 All ``command`` blocks are queued for execution until the next
4639 ``batchsubmit`` block.
4595 ``batchsubmit`` block.
4640
4596
4641 batchsubmit
4597 batchsubmit
4642 -----------
4598 -----------
4643
4599
4644 Submit previously queued ``command`` blocks as a batch request.
4600 Submit previously queued ``command`` blocks as a batch request.
4645
4601
4646 This action MUST be paired with a ``batchbegin`` action.
4602 This action MUST be paired with a ``batchbegin`` action.
4647
4603
4648 httprequest <method> <path>
4604 httprequest <method> <path>
4649 ---------------------------
4605 ---------------------------
4650
4606
4651 (HTTP peer only)
4607 (HTTP peer only)
4652
4608
4653 Send an HTTP request to the peer.
4609 Send an HTTP request to the peer.
4654
4610
4655 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4611 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4656
4612
4657 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4613 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4658 headers to add to the request. e.g. ``Accept: foo``.
4614 headers to add to the request. e.g. ``Accept: foo``.
4659
4615
4660 The following arguments are special:
4616 The following arguments are special:
4661
4617
4662 ``BODYFILE``
4618 ``BODYFILE``
4663 The content of the file defined as the value to this argument will be
4619 The content of the file defined as the value to this argument will be
4664 transferred verbatim as the HTTP request body.
4620 transferred verbatim as the HTTP request body.
4665
4621
4666 ``frame <type> <flags> <payload>``
4622 ``frame <type> <flags> <payload>``
4667 Send a unified protocol frame as part of the request body.
4623 Send a unified protocol frame as part of the request body.
4668
4624
4669 All frames will be collected and sent as the body to the HTTP
4625 All frames will be collected and sent as the body to the HTTP
4670 request.
4626 request.
4671
4627
4672 close
4628 close
4673 -----
4629 -----
4674
4630
4675 Close the connection to the server.
4631 Close the connection to the server.
4676
4632
4677 flush
4633 flush
4678 -----
4634 -----
4679
4635
4680 Flush data written to the server.
4636 Flush data written to the server.
4681
4637
4682 readavailable
4638 readavailable
4683 -------------
4639 -------------
4684
4640
4685 Close the write end of the connection and read all available data from
4641 Close the write end of the connection and read all available data from
4686 the server.
4642 the server.
4687
4643
4688 If the connection to the server encompasses multiple pipes, we poll both
4644 If the connection to the server encompasses multiple pipes, we poll both
4689 pipes and read available data.
4645 pipes and read available data.
4690
4646
4691 readline
4647 readline
4692 --------
4648 --------
4693
4649
4694 Read a line of output from the server. If there are multiple output
4650 Read a line of output from the server. If there are multiple output
4695 pipes, reads only the main pipe.
4651 pipes, reads only the main pipe.
4696
4652
4697 ereadline
4653 ereadline
4698 ---------
4654 ---------
4699
4655
4700 Like ``readline``, but read from the stderr pipe, if available.
4656 Like ``readline``, but read from the stderr pipe, if available.
4701
4657
4702 read <X>
4658 read <X>
4703 --------
4659 --------
4704
4660
4705 ``read()`` N bytes from the server's main output pipe.
4661 ``read()`` N bytes from the server's main output pipe.
4706
4662
4707 eread <X>
4663 eread <X>
4708 ---------
4664 ---------
4709
4665
4710 ``read()`` N bytes from the server's stderr pipe, if available.
4666 ``read()`` N bytes from the server's stderr pipe, if available.
4711
4667
4712 Specifying Unified Frame-Based Protocol Frames
4668 Specifying Unified Frame-Based Protocol Frames
4713 ----------------------------------------------
4669 ----------------------------------------------
4714
4670
4715 It is possible to emit a *Unified Frame-Based Protocol* by using special
4671 It is possible to emit a *Unified Frame-Based Protocol* by using special
4716 syntax.
4672 syntax.
4717
4673
4718 A frame is composed as a type, flags, and payload. These can be parsed
4674 A frame is composed as a type, flags, and payload. These can be parsed
4719 from a string of the form:
4675 from a string of the form:
4720
4676
4721 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4677 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4722
4678
4723 ``request-id`` and ``stream-id`` are integers defining the request and
4679 ``request-id`` and ``stream-id`` are integers defining the request and
4724 stream identifiers.
4680 stream identifiers.
4725
4681
4726 ``type`` can be an integer value for the frame type or the string name
4682 ``type`` can be an integer value for the frame type or the string name
4727 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4683 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4728 ``command-name``.
4684 ``command-name``.
4729
4685
4730 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4686 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4731 components. Each component (and there can be just one) can be an integer
4687 components. Each component (and there can be just one) can be an integer
4732 or a flag name for stream flags or frame flags, respectively. Values are
4688 or a flag name for stream flags or frame flags, respectively. Values are
4733 resolved to integers and then bitwise OR'd together.
4689 resolved to integers and then bitwise OR'd together.
4734
4690
4735 ``payload`` represents the raw frame payload. If it begins with
4691 ``payload`` represents the raw frame payload. If it begins with
4736 ``cbor:``, the following string is evaluated as Python code and the
4692 ``cbor:``, the following string is evaluated as Python code and the
4737 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4693 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4738 as a Python byte string literal.
4694 as a Python byte string literal.
4739 """
4695 """
4740 opts = pycompat.byteskwargs(opts)
4696 opts = pycompat.byteskwargs(opts)
4741
4697
4742 if opts[b'localssh'] and not repo:
4698 if opts[b'localssh'] and not repo:
4743 raise error.Abort(_(b'--localssh requires a repository'))
4699 raise error.Abort(_(b'--localssh requires a repository'))
4744
4700
4745 if opts[b'peer'] and opts[b'peer'] not in (
4701 if opts[b'peer'] and opts[b'peer'] not in (
4746 b'raw',
4702 b'raw',
4747 b'ssh1',
4703 b'ssh1',
4748 ):
4704 ):
4749 raise error.Abort(
4705 raise error.Abort(
4750 _(b'invalid value for --peer'),
4706 _(b'invalid value for --peer'),
4751 hint=_(b'valid values are "raw" and "ssh1"'),
4707 hint=_(b'valid values are "raw" and "ssh1"'),
4752 )
4708 )
4753
4709
4754 if path and opts[b'localssh']:
4710 if path and opts[b'localssh']:
4755 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4711 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4756
4712
4757 if ui.interactive():
4713 if ui.interactive():
4758 ui.write(_(b'(waiting for commands on stdin)\n'))
4714 ui.write(_(b'(waiting for commands on stdin)\n'))
4759
4715
4760 blocks = list(_parsewirelangblocks(ui.fin))
4716 blocks = list(_parsewirelangblocks(ui.fin))
4761
4717
4762 proc = None
4718 proc = None
4763 stdin = None
4719 stdin = None
4764 stdout = None
4720 stdout = None
4765 stderr = None
4721 stderr = None
4766 opener = None
4722 opener = None
4767
4723
4768 if opts[b'localssh']:
4724 if opts[b'localssh']:
4769 # We start the SSH server in its own process so there is process
4725 # We start the SSH server in its own process so there is process
4770 # separation. This prevents a whole class of potential bugs around
4726 # separation. This prevents a whole class of potential bugs around
4771 # shared state from interfering with server operation.
4727 # shared state from interfering with server operation.
4772 args = procutil.hgcmd() + [
4728 args = procutil.hgcmd() + [
4773 b'-R',
4729 b'-R',
4774 repo.root,
4730 repo.root,
4775 b'debugserve',
4731 b'debugserve',
4776 b'--sshstdio',
4732 b'--sshstdio',
4777 ]
4733 ]
4778 proc = subprocess.Popen(
4734 proc = subprocess.Popen(
4779 pycompat.rapply(procutil.tonativestr, args),
4735 pycompat.rapply(procutil.tonativestr, args),
4780 stdin=subprocess.PIPE,
4736 stdin=subprocess.PIPE,
4781 stdout=subprocess.PIPE,
4737 stdout=subprocess.PIPE,
4782 stderr=subprocess.PIPE,
4738 stderr=subprocess.PIPE,
4783 bufsize=0,
4739 bufsize=0,
4784 )
4740 )
4785
4741
4786 stdin = proc.stdin
4742 stdin = proc.stdin
4787 stdout = proc.stdout
4743 stdout = proc.stdout
4788 stderr = proc.stderr
4744 stderr = proc.stderr
4789
4745
4790 # We turn the pipes into observers so we can log I/O.
4746 # We turn the pipes into observers so we can log I/O.
4791 if ui.verbose or opts[b'peer'] == b'raw':
4747 if ui.verbose or opts[b'peer'] == b'raw':
4792 stdin = util.makeloggingfileobject(
4748 stdin = util.makeloggingfileobject(
4793 ui, proc.stdin, b'i', logdata=True
4749 ui, proc.stdin, b'i', logdata=True
4794 )
4750 )
4795 stdout = util.makeloggingfileobject(
4751 stdout = util.makeloggingfileobject(
4796 ui, proc.stdout, b'o', logdata=True
4752 ui, proc.stdout, b'o', logdata=True
4797 )
4753 )
4798 stderr = util.makeloggingfileobject(
4754 stderr = util.makeloggingfileobject(
4799 ui, proc.stderr, b'e', logdata=True
4755 ui, proc.stderr, b'e', logdata=True
4800 )
4756 )
4801
4757
4802 # --localssh also implies the peer connection settings.
4758 # --localssh also implies the peer connection settings.
4803
4759
4804 url = b'ssh://localserver'
4760 url = b'ssh://localserver'
4805 autoreadstderr = not opts[b'noreadstderr']
4761 autoreadstderr = not opts[b'noreadstderr']
4806
4762
4807 if opts[b'peer'] == b'ssh1':
4763 if opts[b'peer'] == b'ssh1':
4808 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4764 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4809 peer = sshpeer.sshv1peer(
4765 peer = sshpeer.sshv1peer(
4810 ui,
4766 ui,
4811 url,
4767 url,
4812 proc,
4768 proc,
4813 stdin,
4769 stdin,
4814 stdout,
4770 stdout,
4815 stderr,
4771 stderr,
4816 None,
4772 None,
4817 autoreadstderr=autoreadstderr,
4773 autoreadstderr=autoreadstderr,
4818 )
4774 )
4819 elif opts[b'peer'] == b'raw':
4775 elif opts[b'peer'] == b'raw':
4820 ui.write(_(b'using raw connection to peer\n'))
4776 ui.write(_(b'using raw connection to peer\n'))
4821 peer = None
4777 peer = None
4822 else:
4778 else:
4823 ui.write(_(b'creating ssh peer from handshake results\n'))
4779 ui.write(_(b'creating ssh peer from handshake results\n'))
4824 peer = sshpeer.makepeer(
4780 peer = sshpeer.makepeer(
4825 ui,
4781 ui,
4826 url,
4782 url,
4827 proc,
4783 proc,
4828 stdin,
4784 stdin,
4829 stdout,
4785 stdout,
4830 stderr,
4786 stderr,
4831 autoreadstderr=autoreadstderr,
4787 autoreadstderr=autoreadstderr,
4832 )
4788 )
4833
4789
4834 elif path:
4790 elif path:
4835 # We bypass hg.peer() so we can proxy the sockets.
4791 # We bypass hg.peer() so we can proxy the sockets.
4836 # TODO consider not doing this because we skip
4792 # TODO consider not doing this because we skip
4837 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4793 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4838 u = urlutil.url(path)
4794 u = urlutil.url(path)
4839 if u.scheme != b'http':
4795 if u.scheme != b'http':
4840 raise error.Abort(_(b'only http:// paths are currently supported'))
4796 raise error.Abort(_(b'only http:// paths are currently supported'))
4841
4797
4842 url, authinfo = u.authinfo()
4798 url, authinfo = u.authinfo()
4843 openerargs = {
4799 openerargs = {
4844 'useragent': b'Mercurial debugwireproto',
4800 'useragent': b'Mercurial debugwireproto',
4845 }
4801 }
4846
4802
4847 # Turn pipes/sockets into observers so we can log I/O.
4803 # Turn pipes/sockets into observers so we can log I/O.
4848 if ui.verbose:
4804 if ui.verbose:
4849 openerargs.update(
4805 openerargs.update(
4850 {
4806 {
4851 'loggingfh': ui,
4807 'loggingfh': ui,
4852 'loggingname': b's',
4808 'loggingname': b's',
4853 'loggingopts': {
4809 'loggingopts': {
4854 'logdata': True,
4810 'logdata': True,
4855 'logdataapis': False,
4811 'logdataapis': False,
4856 },
4812 },
4857 }
4813 }
4858 )
4814 )
4859
4815
4860 if ui.debugflag:
4816 if ui.debugflag:
4861 openerargs['loggingopts']['logdataapis'] = True
4817 openerargs['loggingopts']['logdataapis'] = True
4862
4818
4863 # Don't send default headers when in raw mode. This allows us to
4819 # Don't send default headers when in raw mode. This allows us to
4864 # bypass most of the behavior of our URL handling code so we can
4820 # bypass most of the behavior of our URL handling code so we can
4865 # have near complete control over what's sent on the wire.
4821 # have near complete control over what's sent on the wire.
4866 if opts[b'peer'] == b'raw':
4822 if opts[b'peer'] == b'raw':
4867 openerargs['sendaccept'] = False
4823 openerargs['sendaccept'] = False
4868
4824
4869 opener = urlmod.opener(ui, authinfo, **openerargs)
4825 opener = urlmod.opener(ui, authinfo, **openerargs)
4870
4826
4871 if opts[b'peer'] == b'raw':
4827 if opts[b'peer'] == b'raw':
4872 ui.write(_(b'using raw connection to peer\n'))
4828 ui.write(_(b'using raw connection to peer\n'))
4873 peer = None
4829 peer = None
4874 elif opts[b'peer']:
4830 elif opts[b'peer']:
4875 raise error.Abort(
4831 raise error.Abort(
4876 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4832 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4877 )
4833 )
4878 else:
4834 else:
4879 peer = httppeer.makepeer(ui, path, opener=opener)
4835 peer = httppeer.makepeer(ui, path, opener=opener)
4880
4836
4881 # We /could/ populate stdin/stdout with sock.makefile()...
4837 # We /could/ populate stdin/stdout with sock.makefile()...
4882 else:
4838 else:
4883 raise error.Abort(_(b'unsupported connection configuration'))
4839 raise error.Abort(_(b'unsupported connection configuration'))
4884
4840
4885 batchedcommands = None
4841 batchedcommands = None
4886
4842
4887 # Now perform actions based on the parsed wire language instructions.
4843 # Now perform actions based on the parsed wire language instructions.
4888 for action, lines in blocks:
4844 for action, lines in blocks:
4889 if action in (b'raw', b'raw+'):
4845 if action in (b'raw', b'raw+'):
4890 if not stdin:
4846 if not stdin:
4891 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4847 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4892
4848
4893 # Concatenate the data together.
4849 # Concatenate the data together.
4894 data = b''.join(l.lstrip() for l in lines)
4850 data = b''.join(l.lstrip() for l in lines)
4895 data = stringutil.unescapestr(data)
4851 data = stringutil.unescapestr(data)
4896 stdin.write(data)
4852 stdin.write(data)
4897
4853
4898 if action == b'raw+':
4854 if action == b'raw+':
4899 stdin.flush()
4855 stdin.flush()
4900 elif action == b'flush':
4856 elif action == b'flush':
4901 if not stdin:
4857 if not stdin:
4902 raise error.Abort(_(b'cannot call flush on this peer'))
4858 raise error.Abort(_(b'cannot call flush on this peer'))
4903 stdin.flush()
4859 stdin.flush()
4904 elif action.startswith(b'command'):
4860 elif action.startswith(b'command'):
4905 if not peer:
4861 if not peer:
4906 raise error.Abort(
4862 raise error.Abort(
4907 _(
4863 _(
4908 b'cannot send commands unless peer instance '
4864 b'cannot send commands unless peer instance '
4909 b'is available'
4865 b'is available'
4910 )
4866 )
4911 )
4867 )
4912
4868
4913 command = action.split(b' ', 1)[1]
4869 command = action.split(b' ', 1)[1]
4914
4870
4915 args = {}
4871 args = {}
4916 for line in lines:
4872 for line in lines:
4917 # We need to allow empty values.
4873 # We need to allow empty values.
4918 fields = line.lstrip().split(b' ', 1)
4874 fields = line.lstrip().split(b' ', 1)
4919 if len(fields) == 1:
4875 if len(fields) == 1:
4920 key = fields[0]
4876 key = fields[0]
4921 value = b''
4877 value = b''
4922 else:
4878 else:
4923 key, value = fields
4879 key, value = fields
4924
4880
4925 if value.startswith(b'eval:'):
4881 if value.startswith(b'eval:'):
4926 value = stringutil.evalpythonliteral(value[5:])
4882 value = stringutil.evalpythonliteral(value[5:])
4927 else:
4883 else:
4928 value = stringutil.unescapestr(value)
4884 value = stringutil.unescapestr(value)
4929
4885
4930 args[key] = value
4886 args[key] = value
4931
4887
4932 if batchedcommands is not None:
4888 if batchedcommands is not None:
4933 batchedcommands.append((command, args))
4889 batchedcommands.append((command, args))
4934 continue
4890 continue
4935
4891
4936 ui.status(_(b'sending %s command\n') % command)
4892 ui.status(_(b'sending %s command\n') % command)
4937
4893
4938 if b'PUSHFILE' in args:
4894 if b'PUSHFILE' in args:
4939 with open(args[b'PUSHFILE'], 'rb') as fh:
4895 with open(args[b'PUSHFILE'], 'rb') as fh:
4940 del args[b'PUSHFILE']
4896 del args[b'PUSHFILE']
4941 res, output = peer._callpush(
4897 res, output = peer._callpush(
4942 command, fh, **pycompat.strkwargs(args)
4898 command, fh, **pycompat.strkwargs(args)
4943 )
4899 )
4944 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4900 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4945 ui.status(
4901 ui.status(
4946 _(b'remote output: %s\n') % stringutil.escapestr(output)
4902 _(b'remote output: %s\n') % stringutil.escapestr(output)
4947 )
4903 )
4948 else:
4904 else:
4949 with peer.commandexecutor() as e:
4905 with peer.commandexecutor() as e:
4950 res = e.callcommand(command, args).result()
4906 res = e.callcommand(command, args).result()
4951
4907
4952 ui.status(
4908 ui.status(
4953 _(b'response: %s\n')
4909 _(b'response: %s\n')
4954 % stringutil.pprint(res, bprefix=True, indent=2)
4910 % stringutil.pprint(res, bprefix=True, indent=2)
4955 )
4911 )
4956
4912
4957 elif action == b'batchbegin':
4913 elif action == b'batchbegin':
4958 if batchedcommands is not None:
4914 if batchedcommands is not None:
4959 raise error.Abort(_(b'nested batchbegin not allowed'))
4915 raise error.Abort(_(b'nested batchbegin not allowed'))
4960
4916
4961 batchedcommands = []
4917 batchedcommands = []
4962 elif action == b'batchsubmit':
4918 elif action == b'batchsubmit':
4963 # There is a batching API we could go through. But it would be
4919 # There is a batching API we could go through. But it would be
4964 # difficult to normalize requests into function calls. It is easier
4920 # difficult to normalize requests into function calls. It is easier
4965 # to bypass this layer and normalize to commands + args.
4921 # to bypass this layer and normalize to commands + args.
4966 ui.status(
4922 ui.status(
4967 _(b'sending batch with %d sub-commands\n')
4923 _(b'sending batch with %d sub-commands\n')
4968 % len(batchedcommands)
4924 % len(batchedcommands)
4969 )
4925 )
4970 assert peer is not None
4926 assert peer is not None
4971 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4927 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4972 ui.status(
4928 ui.status(
4973 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4929 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4974 )
4930 )
4975
4931
4976 batchedcommands = None
4932 batchedcommands = None
4977
4933
4978 elif action.startswith(b'httprequest '):
4934 elif action.startswith(b'httprequest '):
4979 if not opener:
4935 if not opener:
4980 raise error.Abort(
4936 raise error.Abort(
4981 _(b'cannot use httprequest without an HTTP peer')
4937 _(b'cannot use httprequest without an HTTP peer')
4982 )
4938 )
4983
4939
4984 request = action.split(b' ', 2)
4940 request = action.split(b' ', 2)
4985 if len(request) != 3:
4941 if len(request) != 3:
4986 raise error.Abort(
4942 raise error.Abort(
4987 _(
4943 _(
4988 b'invalid httprequest: expected format is '
4944 b'invalid httprequest: expected format is '
4989 b'"httprequest <method> <path>'
4945 b'"httprequest <method> <path>'
4990 )
4946 )
4991 )
4947 )
4992
4948
4993 method, httppath = request[1:]
4949 method, httppath = request[1:]
4994 headers = {}
4950 headers = {}
4995 body = None
4951 body = None
4996 frames = []
4952 frames = []
4997 for line in lines:
4953 for line in lines:
4998 line = line.lstrip()
4954 line = line.lstrip()
4999 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4955 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
5000 if m:
4956 if m:
5001 # Headers need to use native strings.
4957 # Headers need to use native strings.
5002 key = pycompat.strurl(m.group(1))
4958 key = pycompat.strurl(m.group(1))
5003 value = pycompat.strurl(m.group(2))
4959 value = pycompat.strurl(m.group(2))
5004 headers[key] = value
4960 headers[key] = value
5005 continue
4961 continue
5006
4962
5007 if line.startswith(b'BODYFILE '):
4963 if line.startswith(b'BODYFILE '):
5008 with open(line.split(b' ', 1), b'rb') as fh:
4964 with open(line.split(b' ', 1), b'rb') as fh:
5009 body = fh.read()
4965 body = fh.read()
5010 elif line.startswith(b'frame '):
4966 elif line.startswith(b'frame '):
5011 frame = wireprotoframing.makeframefromhumanstring(
4967 frame = wireprotoframing.makeframefromhumanstring(
5012 line[len(b'frame ') :]
4968 line[len(b'frame ') :]
5013 )
4969 )
5014
4970
5015 frames.append(frame)
4971 frames.append(frame)
5016 else:
4972 else:
5017 raise error.Abort(
4973 raise error.Abort(
5018 _(b'unknown argument to httprequest: %s') % line
4974 _(b'unknown argument to httprequest: %s') % line
5019 )
4975 )
5020
4976
5021 url = path + httppath
4977 url = path + httppath
5022
4978
5023 if frames:
4979 if frames:
5024 body = b''.join(bytes(f) for f in frames)
4980 body = b''.join(bytes(f) for f in frames)
5025
4981
5026 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4982 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
5027
4983
5028 # urllib.Request insists on using has_data() as a proxy for
4984 # urllib.Request insists on using has_data() as a proxy for
5029 # determining the request method. Override that to use our
4985 # determining the request method. Override that to use our
5030 # explicitly requested method.
4986 # explicitly requested method.
5031 req.get_method = lambda: pycompat.sysstr(method)
4987 req.get_method = lambda: pycompat.sysstr(method)
5032
4988
5033 try:
4989 try:
5034 res = opener.open(req)
4990 res = opener.open(req)
5035 body = res.read()
4991 body = res.read()
5036 except util.urlerr.urlerror as e:
4992 except util.urlerr.urlerror as e:
5037 # read() method must be called, but only exists in Python 2
4993 # read() method must be called, but only exists in Python 2
5038 getattr(e, 'read', lambda: None)()
4994 getattr(e, 'read', lambda: None)()
5039 continue
4995 continue
5040
4996
5041 ct = res.headers.get('Content-Type')
4997 ct = res.headers.get('Content-Type')
5042 if ct == 'application/mercurial-cbor':
4998 if ct == 'application/mercurial-cbor':
5043 ui.write(
4999 ui.write(
5044 _(b'cbor> %s\n')
5000 _(b'cbor> %s\n')
5045 % stringutil.pprint(
5001 % stringutil.pprint(
5046 cborutil.decodeall(body), bprefix=True, indent=2
5002 cborutil.decodeall(body), bprefix=True, indent=2
5047 )
5003 )
5048 )
5004 )
5049
5005
5050 elif action == b'close':
5006 elif action == b'close':
5051 assert peer is not None
5007 assert peer is not None
5052 peer.close()
5008 peer.close()
5053 elif action == b'readavailable':
5009 elif action == b'readavailable':
5054 if not stdout or not stderr:
5010 if not stdout or not stderr:
5055 raise error.Abort(
5011 raise error.Abort(
5056 _(b'readavailable not available on this peer')
5012 _(b'readavailable not available on this peer')
5057 )
5013 )
5058
5014
5059 stdin.close()
5015 stdin.close()
5060 stdout.read()
5016 stdout.read()
5061 stderr.read()
5017 stderr.read()
5062
5018
5063 elif action == b'readline':
5019 elif action == b'readline':
5064 if not stdout:
5020 if not stdout:
5065 raise error.Abort(_(b'readline not available on this peer'))
5021 raise error.Abort(_(b'readline not available on this peer'))
5066 stdout.readline()
5022 stdout.readline()
5067 elif action == b'ereadline':
5023 elif action == b'ereadline':
5068 if not stderr:
5024 if not stderr:
5069 raise error.Abort(_(b'ereadline not available on this peer'))
5025 raise error.Abort(_(b'ereadline not available on this peer'))
5070 stderr.readline()
5026 stderr.readline()
5071 elif action.startswith(b'read '):
5027 elif action.startswith(b'read '):
5072 count = int(action.split(b' ', 1)[1])
5028 count = int(action.split(b' ', 1)[1])
5073 if not stdout:
5029 if not stdout:
5074 raise error.Abort(_(b'read not available on this peer'))
5030 raise error.Abort(_(b'read not available on this peer'))
5075 stdout.read(count)
5031 stdout.read(count)
5076 elif action.startswith(b'eread '):
5032 elif action.startswith(b'eread '):
5077 count = int(action.split(b' ', 1)[1])
5033 count = int(action.split(b' ', 1)[1])
5078 if not stderr:
5034 if not stderr:
5079 raise error.Abort(_(b'eread not available on this peer'))
5035 raise error.Abort(_(b'eread not available on this peer'))
5080 stderr.read(count)
5036 stderr.read(count)
5081 else:
5037 else:
5082 raise error.Abort(_(b'unknown action: %s') % action)
5038 raise error.Abort(_(b'unknown action: %s') % action)
5083
5039
5084 if batchedcommands is not None:
5040 if batchedcommands is not None:
5085 raise error.Abort(_(b'unclosed "batchbegin" request'))
5041 raise error.Abort(_(b'unclosed "batchbegin" request'))
5086
5042
5087 if peer:
5043 if peer:
5088 peer.close()
5044 peer.close()
5089
5045
5090 if proc:
5046 if proc:
5091 proc.kill()
5047 proc.kill()
@@ -1,218 +1,269
# revlogutils/debug.py - utility used for revlog debugging
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 # Copyright 2022 Octobus <contact@octobus.net>
4 # Copyright 2022 Octobus <contact@octobus.net>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 from .. import (
9 from .. import (
10 node as nodemod,
10 node as nodemod,
11 )
11 )
12
12
13 from . import (
13 from . import (
14 constants,
14 constants,
15 )
15 )
16
16
17 INDEX_ENTRY_DEBUG_COLUMN = []
17 INDEX_ENTRY_DEBUG_COLUMN = []
18
18
19 NODE_SIZE = object()
19 NODE_SIZE = object()
20
20
21
21
22 class _column_base:
22 class _column_base:
23 """constains the definition of a revlog column
23 """constains the definition of a revlog column
24
24
25 name: the column header,
25 name: the column header,
26 value_func: the function called to get a value,
26 value_func: the function called to get a value,
27 size: the width of the column,
27 size: the width of the column,
28 verbose_only: only include the column in verbose mode.
28 verbose_only: only include the column in verbose mode.
29 """
29 """
30
30
31 def __init__(self, name, value_func, size=None, verbose=False):
31 def __init__(self, name, value_func, size=None, verbose=False):
32 self.name = name
32 self.name = name
33 self.value_func = value_func
33 self.value_func = value_func
34 if size is not NODE_SIZE:
34 if size is not NODE_SIZE:
35 if size is None:
35 if size is None:
36 size = 8 # arbitrary default
36 size = 8 # arbitrary default
37 size = max(len(name), size)
37 size = max(len(name), size)
38 self._size = size
38 self._size = size
39 self.verbose_only = verbose
39 self.verbose_only = verbose
40
40
41 def get_size(self, node_size):
41 def get_size(self, node_size):
42 if self._size is NODE_SIZE:
42 if self._size is NODE_SIZE:
43 return node_size
43 return node_size
44 else:
44 else:
45 return self._size
45 return self._size
46
46
47
47
def debug_column(name, size=None, verbose=False):
    """decorated function is registered as a column

    name: the name of the column,
    size: the expected size of the column.
    """

    def register(func):
        column = _column_base(name, func, size=size, verbose=verbose)
        INDEX_ENTRY_DEBUG_COLUMN.append(column)
        return column

    return register
66
66
67
67
@debug_column(b"rev", size=6)
def _rev(index, rev, entry, hexfn):
    """revision number of the entry"""
    return b"%d" % rev


@debug_column(b"rank", size=6, verbose=True)
def rank(index, rev, entry, hexfn):
    """the ENTRY_RANK field of the index entry"""
    return b"%d" % entry[constants.ENTRY_RANK]


@debug_column(b"linkrev", size=6)
def _linkrev(index, rev, entry, hexfn):
    """the ENTRY_LINK_REV field of the index entry"""
    return b"%d" % entry[constants.ENTRY_LINK_REV]


@debug_column(b"nodeid", size=NODE_SIZE)
def _nodeid(index, rev, entry, hexfn):
    """node id of this revision, hex encoded by hexfn"""
    return hexfn(entry[constants.ENTRY_NODE_ID])


@debug_column(b"p1-rev", size=6, verbose=True)
def _p1_rev(index, rev, entry, hexfn):
    """revision number of the first parent"""
    return b"%d" % entry[constants.ENTRY_PARENT_1]


@debug_column(b"p1-nodeid", size=NODE_SIZE)
def _p1_node(index, rev, entry, hexfn):
    """node id of the first parent, looked up through the index"""
    parent = entry[constants.ENTRY_PARENT_1]
    p_entry = index[parent]
    return hexfn(p_entry[constants.ENTRY_NODE_ID])


@debug_column(b"p2-rev", size=6, verbose=True)
def _p2_rev(index, rev, entry, hexfn):
    """revision number of the second parent"""
    return b"%d" % entry[constants.ENTRY_PARENT_2]


@debug_column(b"p2-nodeid", size=NODE_SIZE)
def _p2_node(index, rev, entry, hexfn):
    """node id of the second parent, looked up through the index"""
    parent = entry[constants.ENTRY_PARENT_2]
    p_entry = index[parent]
    return hexfn(p_entry[constants.ENTRY_NODE_ID])
110
110
111
111
@debug_column(b"full-size", size=20, verbose=True)
def full_size(index, rev, entry, hexfn):
    """uncompressed length of the revision data"""
    return b"%d" % entry[constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]


@debug_column(b"delta-base", size=6, verbose=True)
def delta_base(index, rev, entry, hexfn):
    """the ENTRY_DELTA_BASE field of the index entry"""
    return b"%d" % entry[constants.ENTRY_DELTA_BASE]


@debug_column(b"flags", size=2, verbose=True)
def flags(index, rev, entry, hexfn):
    """revision flags: the lower 16 bits of the offset/flags field"""
    field = entry[constants.ENTRY_DATA_OFFSET]
    field &= 0xFFFF
    return b"%d" % field


@debug_column(b"comp-mode", size=4, verbose=True)
def compression_mode(index, rev, entry, hexfn):
    """the ENTRY_DATA_COMPRESSION_MODE field of the index entry"""
    return b"%d" % entry[constants.ENTRY_DATA_COMPRESSION_MODE]


@debug_column(b"data-offset", size=20, verbose=True)
def data_offset(index, rev, entry, hexfn):
    """data offset: the upper bits of the offset/flags field"""
    field = entry[constants.ENTRY_DATA_OFFSET]
    field >>= 16
    return b"%d" % field


@debug_column(b"chunk-size", size=10, verbose=True)
def data_chunk_size(index, rev, entry, hexfn):
    """stored (compressed) length of the revision data"""
    return b"%d" % entry[constants.ENTRY_DATA_COMPRESSED_LENGTH]
144
144
145
145
@debug_column(b"sd-comp-mode", size=7, verbose=True)
def sidedata_compression_mode(index, rev, entry, hexfn):
    """sidedata compression mode, as a symbolic name when known"""
    compression = entry[constants.ENTRY_SIDEDATA_COMPRESSION_MODE]
    if compression == constants.COMP_MODE_PLAIN:
        return b"plain"
    elif compression == constants.COMP_MODE_DEFAULT:
        return b"default"
    elif compression == constants.COMP_MODE_INLINE:
        return b"inline"
    else:
        # unknown mode: fall back to the raw numeric value
        return b"%d" % compression


@debug_column(b"sidedata-offset", size=20, verbose=True)
def sidedata_offset(index, rev, entry, hexfn):
    """the ENTRY_SIDEDATA_OFFSET field of the index entry"""
    return b"%d" % entry[constants.ENTRY_SIDEDATA_OFFSET]


@debug_column(b"sd-chunk-size", size=10, verbose=True)
def sidedata_chunk_size(index, rev, entry, hexfn):
    """stored (compressed) length of the sidedata chunk"""
    return b"%d" % entry[constants.ENTRY_SIDEDATA_COMPRESSED_LENGTH]
167
167
168
168
def debug_index(
    ui,
    repo,
    formatter,
    revlog,
    full_node,
):
    """display index data for a revlog"""
    hexfn = nodemod.hex if full_node else nodemod.short

    # width of a displayed node id; probe the first revision when one exists
    idlen = 12
    for probe in revlog:
        idlen = len(hexfn(revlog.node(probe)))
        break

    fm = formatter

    # columns shown in this run (verbose-only columns need --verbose)
    visible = [
        column
        for column in INDEX_ENTRY_DEBUG_COLUMN
        if ui.verbose or not column.verbose_only
    ]

    header = b' '.join(
        column.name.rjust(column.get_size(idlen)) for column in visible
    )
    fm.plain(header + b'\n')

    index = revlog.index

    for rev in revlog:
        fm.startitem()
        entry = index[rev]
        for pos, column in enumerate(visible):
            if pos:
                fm.plain(b' ')
            width = column.get_size(idlen)
            value = column.value_func(index, rev, entry, hexfn)
            fm.write(column.name, b"%%%ds" % width, value)
        fm.plain(b'\n')

    fm.end()
219
220
221 def dump(ui, revlog):
222 """perform the work for `hg debugrevlog --dump"""
223 # XXX seems redundant with debug index ?
224 r = revlog
225 numrevs = len(r)
226 ui.write(
227 (
228 b"# rev p1rev p2rev start end deltastart base p1 p2"
229 b" rawsize totalsize compression heads chainlen\n"
230 )
231 )
232 ts = 0
233 heads = set()
234
235 for rev in range(numrevs):
236 dbase = r.deltaparent(rev)
237 if dbase == -1:
238 dbase = rev
239 cbase = r.chainbase(rev)
240 clen = r.chainlen(rev)
241 p1, p2 = r.parentrevs(rev)
242 rs = r.rawsize(rev)
243 ts = ts + rs
244 heads -= set(r.parentrevs(rev))
245 heads.add(rev)
246 try:
247 compression = ts / r.end(rev)
248 except ZeroDivisionError:
249 compression = 0
250 ui.write(
251 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
252 b"%11d %5d %8d\n"
253 % (
254 rev,
255 p1,
256 p2,
257 r.start(rev),
258 r.end(rev),
259 r.start(dbase),
260 r.start(cbase),
261 r.start(p1),
262 r.start(p2),
263 rs,
264 ts,
265 compression,
266 len(heads),
267 clen,
268 )
269 )
General Comments 0
You need to be logged in to leave comments. Login now