# Excerpt of mercurial/debugcommands.py as rendered by a repository-browser
# diff view (changeset r50092:883be4c7, default branch; commit message:
# "debuglock: make the command more useful in non-interactive mode", by
# marmoute). Diff hunk header: @@ -1,4918 +1,4930 @@
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import binascii
9 import binascii
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import contextlib
12 import contextlib
13 import difflib
13 import difflib
14 import errno
14 import errno
15 import glob
15 import glob
16 import operator
16 import operator
17 import os
17 import os
18 import platform
18 import platform
19 import random
19 import random
20 import re
20 import re
21 import socket
21 import socket
22 import ssl
22 import ssl
23 import stat
23 import stat
24 import string
24 import string
25 import subprocess
25 import subprocess
26 import sys
26 import sys
27 import time
27 import time
28
28
29 from .i18n import _
29 from .i18n import _
30 from .node import (
30 from .node import (
31 bin,
31 bin,
32 hex,
32 hex,
33 nullrev,
33 nullrev,
34 short,
34 short,
35 )
35 )
36 from .pycompat import (
36 from .pycompat import (
37 getattr,
37 getattr,
38 open,
38 open,
39 )
39 )
40 from . import (
40 from . import (
41 bundle2,
41 bundle2,
42 bundlerepo,
42 bundlerepo,
43 changegroup,
43 changegroup,
44 cmdutil,
44 cmdutil,
45 color,
45 color,
46 context,
46 context,
47 copies,
47 copies,
48 dagparser,
48 dagparser,
49 dirstateutils,
49 dirstateutils,
50 encoding,
50 encoding,
51 error,
51 error,
52 exchange,
52 exchange,
53 extensions,
53 extensions,
54 filemerge,
54 filemerge,
55 filesetlang,
55 filesetlang,
56 formatter,
56 formatter,
57 hg,
57 hg,
58 httppeer,
58 httppeer,
59 localrepo,
59 localrepo,
60 lock as lockmod,
60 lock as lockmod,
61 logcmdutil,
61 logcmdutil,
62 mergestate as mergestatemod,
62 mergestate as mergestatemod,
63 metadata,
63 metadata,
64 obsolete,
64 obsolete,
65 obsutil,
65 obsutil,
66 pathutil,
66 pathutil,
67 phases,
67 phases,
68 policy,
68 policy,
69 pvec,
69 pvec,
70 pycompat,
70 pycompat,
71 registrar,
71 registrar,
72 repair,
72 repair,
73 repoview,
73 repoview,
74 requirements,
74 requirements,
75 revlog,
75 revlog,
76 revset,
76 revset,
77 revsetlang,
77 revsetlang,
78 scmutil,
78 scmutil,
79 setdiscovery,
79 setdiscovery,
80 simplemerge,
80 simplemerge,
81 sshpeer,
81 sshpeer,
82 sslutil,
82 sslutil,
83 streamclone,
83 streamclone,
84 strip,
84 strip,
85 tags as tagsmod,
85 tags as tagsmod,
86 templater,
86 templater,
87 treediscovery,
87 treediscovery,
88 upgrade,
88 upgrade,
89 url as urlmod,
89 url as urlmod,
90 util,
90 util,
91 vfs as vfsmod,
91 vfs as vfsmod,
92 wireprotoframing,
92 wireprotoframing,
93 wireprotoserver,
93 wireprotoserver,
94 )
94 )
95 from .interfaces import repository
95 from .interfaces import repository
96 from .utils import (
96 from .utils import (
97 cborutil,
97 cborutil,
98 compression,
98 compression,
99 dateutil,
99 dateutil,
100 procutil,
100 procutil,
101 stringutil,
101 stringutil,
102 urlutil,
102 urlutil,
103 )
103 )
104
104
105 from .revlogutils import (
105 from .revlogutils import (
106 deltas as deltautil,
106 deltas as deltautil,
107 nodemap,
107 nodemap,
108 rewrite,
108 rewrite,
109 sidedata,
109 sidedata,
110 )
110 )
111
111
# Convenience alias so debug commands can release locks uniformly.
release = lockmod.release

# Command table for all debug* commands.  It starts from the commands
# registered by the strip extension module so `hg strip` related debug
# helpers share the same table, then new commands are added via the
# `command` decorator below.
table = {}
table.update(strip.command._table)
command = registrar.command(table)
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index

    With three arguments, the first is a revlog index file opened from
    the current working directory; the two revisions are looked up in
    that standalone revlog.  With two arguments, the revisions are
    resolved against the changelog of the current repository.
    """
    if len(args) == 3:
        index, rev1, rev2 = args
        # Open the revlog straight from disk; no repository is required
        # (hence optionalrepo=True), so auditing is disabled.
        r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = r.lookup
    elif len(args) == 2:
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    a = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
138
138
139
139
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # vfs paths are bytes throughout Mercurial (note the b'wb' mode);
    # use a bytes literal for the filename as well.
    with repo.cachevfs.open(b'eicar-test-file.com', b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join(b'eicar-test-file.com'))
155
155
156
156
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Close the bundle file when done (matches debugbundle's handling);
    # the original leaked the handle returned by hg.openpath.
    with hg.openpath(ui, fname) as f:
        gen = exchange.readbundle(ui, f, fname)
        gen.apply(repo)
163
163
164
164
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
        (
            b'',
            b'from-existing',
            None,
            _(b'continue from a non-empty repository'),
        ),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
    from_existing=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0 and not from_existing:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG (one pass just to count nodes, so
    # the progress bar below can show a meaningful total)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1  # rev id of the most recently committed node
        atbranch = b'default'
        nodeids = []
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # merge: three-way merge the "mf" file contents
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [
                            l.strip()
                            for l in simplemerge.render_minimized(m3)[0]
                        ]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        # carry over the per-rev files from the second parent
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
350
350
351
351
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """dump the contents of a changegroup unbundler to the ui

    With ``all`` set, every delta of every chunk group (changelog,
    manifest, each filelog) is printed in full; otherwise only the
    changelog node hashes are listed.
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        # iter(..., {}) stops at the empty-dict sentinel marking the end
        # of the filelog sections
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
391
391
392
392
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display the format version and obsolescence markers in a bundle part"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # report rather than abort: other parts of the bundle may still
        # be displayable
        msg = b"%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = b"%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        fm = ui.formatter(b'debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()
415
415
416
416
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads encoded in 'data'

    (the original docstring was a copy-paste of _debugobsmarkers'; this
    helper decodes binary phase-heads data and prints one
    "<node> <phasename>" line per head)
    """
    indent_string = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(indent_string)
            ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
425
425
426
426
def _quasirepr(thing):
    """return a bytes repr of 'thing' with deterministic dict ordering

    Mapping types are rendered with their keys sorted so the output is
    stable across runs (plain repr() would follow insertion order).
    """
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        return b'{%s}' % (
            b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
        )
    return pycompat.bytestr(repr(thing))
433
433
434
434
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    # optional filter: only display parts whose type was requested via
    # --part-type
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
457
457
458
458
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # --spec short-circuits: print the bundlespec and stop
            spec = exchange.getbundlespec(ui, f)
            ui.write(b'%s\n' % spec)
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
481
481
482
482
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        caps = peer.capabilities()
        ui.writenoi18n(b'Main capabilities:\n')
        for c in sorted(caps):
            ui.write(b'  %s\n' % c)
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(b2caps.items()):
                ui.write(b'  %s\n' % key)
                for v in values:
                    ui.write(b'    %s\n' % v)
    finally:
        # always release the peer connection, even if listing fails
        peer.close()
502
502
503
503
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = logcmdutil.revsingle(repo, rev, None)
    files = None

    if opts['compute']:
        files = metadata.compute_all_files_changes(ctx)
    else:
        # read pre-computed file-change info from changelog sidedata;
        # `files` stays None if the revision has no such sidedata block
        sd = repo.changelog.sidedata(ctx.rev())
        files_block = sd.get(sidedata.SD_FILES)
        if files_block is not None:
            files = metadata.decode_files_sidedata(sd)
    if files is not None:
        for f in sorted(files.touched):
            # classify each touched file; the categories are checked in
            # priority order, falling back to the generic "touched"
            if f in files.added:
                action = b"added"
            elif f in files.removed:
                action = b"removed"
            elif f in files.merged:
                action = b"merged"
            elif f in files.salvaged:
                action = b"salvaged"
            else:
                action = b"touched"

            copy_parent = b""
            copy_source = b""
            if f in files.copied_from_p1:
                copy_parent = b"p1"
                copy_source = files.copied_from_p1[f]
            elif f in files.copied_from_p2:
                copy_parent = b"p2"
                copy_source = files.copied_from_p2[f]

            data = (action, copy_parent, f, copy_source)
            template = b"%-8s %2s: %s, %s;\n"
            ui.write(template % data)
553
553
554
554
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    # each err is a (format-string, *args) tuple produced by verify()
    for err in repo.dirstate.verify(m1, m2):
        ui.warn(err[0] % err[1:])
        errors += 1
    if errors:
        errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
568
568
569
569
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    else:
        return _debugdisplaycolor(ui)
582
582
583
583
def _debugdisplaycolor(ui):
    """print every available color/effect name, rendered in that effect"""
    # work on a copy so the temporary style table doesn't leak into the
    # caller's ui
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for k, v in ui.configitems(b'color'):
            if k.startswith(b'color.'):
                ui._styles[k] = k[6:]
            elif k.startswith(b'terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_(b'available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(b'%s\n' % colorname, label=label)
600
600
601
601
def _debugdisplaystyle(ui):
    """print every configured style label with its effects, aligned"""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # column width for aligning the effect lists
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            # 50
            ui.write(b': ')
            ui.write(b' ' * (max(0, width - len(label))))
            ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
        ui.write(b'\n')
615
615
616
616
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        msg = _(
            b'(warning: stream clone bundle will contain secret '
            b'revisions)\n'
        )
        ui.warn(msg)

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    reqs = b', '.join(sorted(requirements))
    ui.write(_(b'bundle requirements: %s\n') % reqs)
638
638
639
639
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # A revlog index file was given: emit that revlog's DAG.
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # Yield a ('n', (rev, parents)) node event per revision, plus a
            # ('l', (rev, "rN")) label event for explicitly listed revs.
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        # No file given: walk the repository changelog instead.
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # Map revision number -> list of tag names pointing at it.
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            # Track the current branch so an annotation event ('a') is
            # emitted only when the branch actually changes.
            b = b"default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    # Render the event stream as compact dagtext.
    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
    ui.write(b"\n")
709
709
710
710
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the storage is implied, so the sole positional
    # argument is actually the revision.
    storage_flag = any(
        opts.get(flag) for flag in (b'changelog', b'manifest', b'dir')
    )
    if storage_flag:
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    storage = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(storage.rawdata(storage.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
726
726
727
727
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # Parse with the extended format list when -e/--extended is given.
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
746
746
747
747
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    # Not every revlog implementation supports sparse reads; default to off.
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # Gather per-revision statistics from the revlog index entry:
        # e[1] is the compressed size, e[2] the uncompressed size, e[3] the
        # delta base, e[5]/e[6] the parent revisions (per the indexing below).
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # With general delta the base can be any revision; classify it
            # relative to the parents / previous revision.
            if e[3] == e[5]:
                deltatype = b'p1'
            elif e[3] == e[6]:
                deltatype = b'p2'
            elif e[3] == rev - 1:
                deltatype = b'prev'
            elif e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'other'
        else:
            # Without general delta, a revision is either a full snapshot
            # (base == itself) or a delta against the previous revision.
            if e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        # Sum the compressed sizes of every revision in the delta chain.
        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    # Plain-text column header (only shown in non-templated output).
    fm.plain(
        b'    rev  chain# chainlen     prev   delta       '
        b'size    rawsize  chainsize     ratio   lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b'   readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    # Number chains consecutively by their (unique) base revision.
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        # Bytes spanned on disk from the chain base to the end of this rev.
        lineardist = revstart + comp - basestart
        # Bytes in that span that belong to unrelated revisions.
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # Chain of length one: this revision is its own base.
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            # Simulate a sparse read of the chain and measure how much data
            # would actually be fetched from disk, and in how many hunks.
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
928
928
929
929
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'docket',
            False,
            _(b'display the docket (metadata file) instead'),
        ),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    if opts.get("docket"):
        # --docket: dump the dirstate-v2 metadata ("docket") file instead of
        # the individual entries. Only dirstate-v2 has a docket.
        if not repo.dirstate._use_dirstate_v2:
            raise error.Abort(_(b'dirstate v1 does not have a docket'))

        docket = repo.dirstate._map.docket
        (
            start_offset,
            root_nodes,
            nodes_with_entry,
            nodes_with_copy,
            unused_bytes,
            _unused,
            ignore_pattern,
        ) = dirstateutils.v2.TREE_METADATA.unpack(docket.tree_metadata)

        ui.write(_(b"size of dirstate data: %d\n") % docket.data_size)
        ui.write(_(b"data file uuid: %s\n") % docket.uuid)
        ui.write(_(b"start offset of root nodes: %d\n") % start_offset)
        ui.write(_(b"number of root nodes: %d\n") % root_nodes)
        ui.write(_(b"nodes with entries: %d\n") % nodes_with_entry)
        ui.write(_(b"nodes with copies: %d\n") % nodes_with_copy)
        ui.write(_(b"number of unused bytes: %d\n") % unused_bytes)
        ui.write(
            _(b"ignore pattern hash: %s\n") % binascii.hexlify(ignore_pattern)
        )
        return

    # --nodates is deprecated but still honored: it forces dates off even
    # when --dates (default True) is set.
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:

        def keyfunc(entry):
            # With --datesort, order entries by (mtime, filename).
            filename, _state, _mode, _size, mtime = entry
            return (mtime, filename)

    else:
        keyfunc = None  # sort by filename
    entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
    entries.sort(key=keyfunc)
    for entry in entries:
        filename, state, mode, size, mtime = entry
        if mtime == -1:
            # Padded to the width of the strftime output below so columns
            # stay aligned.
            timestr = b'unset               '
        elif nodates:
            timestr = b'set                 '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
            timestr = encoding.strtolocal(timestr)
        if mode & 0o20000:
            # NOTE(review): the 0o20000 bit in the stored mode appears to
            # mark symlinks — the entry is then displayed as 'lnk'.
            mode = b'lnk'
        else:
            mode = b'%3o' % (mode & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
    # Finally list the recorded copy sources.
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1017
1017
1018
1018
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v1
    """
    # Only dirstate-v2 stores the ignore-pattern hash (in the docket's tree
    # metadata); for dirstate-v1 there is nothing to print.
    if repo.dirstate._use_dirstate_v2:
        docket = repo.dirstate._map.docket
        hash_len = 20  # 160 bits for SHA-1
        # The hash occupies the trailing bytes of the tree metadata blob.
        hash_bytes = docket.tree_metadata[-hash_len:]
        ui.write(binascii.hexlify(hash_bytes) + b'\n')
1033
1033
1034
1034
1035 @command(
1035 @command(
1036 b'debugdiscovery',
1036 b'debugdiscovery',
1037 [
1037 [
1038 (b'', b'old', None, _(b'use old-style discovery')),
1038 (b'', b'old', None, _(b'use old-style discovery')),
1039 (
1039 (
1040 b'',
1040 b'',
1041 b'nonheads',
1041 b'nonheads',
1042 None,
1042 None,
1043 _(b'use old-style discovery with non-heads included'),
1043 _(b'use old-style discovery with non-heads included'),
1044 ),
1044 ),
1045 (b'', b'rev', [], b'restrict discovery to this set of revs'),
1045 (b'', b'rev', [], b'restrict discovery to this set of revs'),
1046 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
1046 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
1047 (
1047 (
1048 b'',
1048 b'',
1049 b'local-as-revs',
1049 b'local-as-revs',
1050 b"",
1050 b"",
1051 b'treat local has having these revisions only',
1051 b'treat local has having these revisions only',
1052 ),
1052 ),
1053 (
1053 (
1054 b'',
1054 b'',
1055 b'remote-as-revs',
1055 b'remote-as-revs',
1056 b"",
1056 b"",
1057 b'use local as remote, with only these revisions',
1057 b'use local as remote, with only these revisions',
1058 ),
1058 ),
1059 ]
1059 ]
1060 + cmdutil.remoteopts
1060 + cmdutil.remoteopts
1061 + cmdutil.formatteropts,
1061 + cmdutil.formatteropts,
1062 _(b'[--rev REV] [OTHER]'),
1062 _(b'[--rev REV] [OTHER]'),
1063 )
1063 )
1064 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1064 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1065 """runs the changeset discovery protocol in isolation
1065 """runs the changeset discovery protocol in isolation
1066
1066
1067 The local peer can be "replaced" by a subset of the local repository by
1067 The local peer can be "replaced" by a subset of the local repository by
1068 using the `--local-as-revs` flag. Int he same way, usual `remote` peer can
1068 using the `--local-as-revs` flag. Int he same way, usual `remote` peer can
1069 be "replaced" by a subset of the local repository using the
1069 be "replaced" by a subset of the local repository using the
1070 `--local-as-revs` flag. This is useful to efficiently debug pathological
1070 `--local-as-revs` flag. This is useful to efficiently debug pathological
1071 discovery situation.
1071 discovery situation.
1072
1072
1073 The following developer oriented config are relevant for people playing with this command:
1073 The following developer oriented config are relevant for people playing with this command:
1074
1074
1075 * devel.discovery.exchange-heads=True
1075 * devel.discovery.exchange-heads=True
1076
1076
1077 If False, the discovery will not start with
1077 If False, the discovery will not start with
1078 remote head fetching and local head querying.
1078 remote head fetching and local head querying.
1079
1079
1080 * devel.discovery.grow-sample=True
1080 * devel.discovery.grow-sample=True
1081
1081
1082 If False, the sample size used in set discovery will not be increased
1082 If False, the sample size used in set discovery will not be increased
1083 through the process
1083 through the process
1084
1084
1085 * devel.discovery.grow-sample.dynamic=True
1085 * devel.discovery.grow-sample.dynamic=True
1086
1086
1087 When discovery.grow-sample.dynamic is True, the default, the sample size is
1087 When discovery.grow-sample.dynamic is True, the default, the sample size is
1088 adapted to the shape of the undecided set (it is set to the max of:
1088 adapted to the shape of the undecided set (it is set to the max of:
1089 <target-size>, len(roots(undecided)), len(heads(undecided)
1089 <target-size>, len(roots(undecided)), len(heads(undecided)
1090
1090
1091 * devel.discovery.grow-sample.rate=1.05
1091 * devel.discovery.grow-sample.rate=1.05
1092
1092
1093 the rate at which the sample grow
1093 the rate at which the sample grow
1094
1094
1095 * devel.discovery.randomize=True
1095 * devel.discovery.randomize=True
1096
1096
1097 If andom sampling during discovery are deterministic. It is meant for
1097 If andom sampling during discovery are deterministic. It is meant for
1098 integration tests.
1098 integration tests.
1099
1099
1100 * devel.discovery.sample-size=200
1100 * devel.discovery.sample-size=200
1101
1101
1102 Control the initial size of the discovery sample
1102 Control the initial size of the discovery sample
1103
1103
1104 * devel.discovery.sample-size.initial=100
1104 * devel.discovery.sample-size.initial=100
1105
1105
1106 Control the initial size of the discovery for initial change
1106 Control the initial size of the discovery for initial change
1107 """
1107 """
1108 opts = pycompat.byteskwargs(opts)
1108 opts = pycompat.byteskwargs(opts)
1109 unfi = repo.unfiltered()
1109 unfi = repo.unfiltered()
1110
1110
1111 # setup potential extra filtering
1111 # setup potential extra filtering
1112 local_revs = opts[b"local_as_revs"]
1112 local_revs = opts[b"local_as_revs"]
1113 remote_revs = opts[b"remote_as_revs"]
1113 remote_revs = opts[b"remote_as_revs"]
1114
1114
1115 # make sure tests are repeatable
1115 # make sure tests are repeatable
1116 random.seed(int(opts[b'seed']))
1116 random.seed(int(opts[b'seed']))
1117
1117
1118 if not remote_revs:
1118 if not remote_revs:
1119
1119
1120 remoteurl, branches = urlutil.get_unique_pull_path(
1120 remoteurl, branches = urlutil.get_unique_pull_path(
1121 b'debugdiscovery', repo, ui, remoteurl
1121 b'debugdiscovery', repo, ui, remoteurl
1122 )
1122 )
1123 remote = hg.peer(repo, opts, remoteurl)
1123 remote = hg.peer(repo, opts, remoteurl)
1124 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
1124 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
1125 else:
1125 else:
1126 branches = (None, [])
1126 branches = (None, [])
1127 remote_filtered_revs = logcmdutil.revrange(
1127 remote_filtered_revs = logcmdutil.revrange(
1128 unfi, [b"not (::(%s))" % remote_revs]
1128 unfi, [b"not (::(%s))" % remote_revs]
1129 )
1129 )
1130 remote_filtered_revs = frozenset(remote_filtered_revs)
1130 remote_filtered_revs = frozenset(remote_filtered_revs)
1131
1131
1132 def remote_func(x):
1132 def remote_func(x):
1133 return remote_filtered_revs
1133 return remote_filtered_revs
1134
1134
1135 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1135 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1136
1136
1137 remote = repo.peer()
1137 remote = repo.peer()
1138 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1138 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1139
1139
1140 if local_revs:
1140 if local_revs:
1141 local_filtered_revs = logcmdutil.revrange(
1141 local_filtered_revs = logcmdutil.revrange(
1142 unfi, [b"not (::(%s))" % local_revs]
1142 unfi, [b"not (::(%s))" % local_revs]
1143 )
1143 )
1144 local_filtered_revs = frozenset(local_filtered_revs)
1144 local_filtered_revs = frozenset(local_filtered_revs)
1145
1145
1146 def local_func(x):
1146 def local_func(x):
1147 return local_filtered_revs
1147 return local_filtered_revs
1148
1148
1149 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1149 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1150 repo = repo.filtered(b'debug-discovery-local-filter')
1150 repo = repo.filtered(b'debug-discovery-local-filter')
1151
1151
1152 data = {}
1152 data = {}
1153 if opts.get(b'old'):
1153 if opts.get(b'old'):
1154
1154
1155 def doit(pushedrevs, remoteheads, remote=remote):
1155 def doit(pushedrevs, remoteheads, remote=remote):
1156 if not util.safehasattr(remote, b'branches'):
1156 if not util.safehasattr(remote, b'branches'):
1157 # enable in-client legacy support
1157 # enable in-client legacy support
1158 remote = localrepo.locallegacypeer(remote.local())
1158 remote = localrepo.locallegacypeer(remote.local())
1159 common, _in, hds = treediscovery.findcommonincoming(
1159 common, _in, hds = treediscovery.findcommonincoming(
1160 repo, remote, force=True, audit=data
1160 repo, remote, force=True, audit=data
1161 )
1161 )
1162 common = set(common)
1162 common = set(common)
1163 if not opts.get(b'nonheads'):
1163 if not opts.get(b'nonheads'):
1164 ui.writenoi18n(
1164 ui.writenoi18n(
1165 b"unpruned common: %s\n"
1165 b"unpruned common: %s\n"
1166 % b" ".join(sorted(short(n) for n in common))
1166 % b" ".join(sorted(short(n) for n in common))
1167 )
1167 )
1168
1168
1169 clnode = repo.changelog.node
1169 clnode = repo.changelog.node
1170 common = repo.revs(b'heads(::%ln)', common)
1170 common = repo.revs(b'heads(::%ln)', common)
1171 common = {clnode(r) for r in common}
1171 common = {clnode(r) for r in common}
1172 return common, hds
1172 return common, hds
1173
1173
1174 else:
1174 else:
1175
1175
1176 def doit(pushedrevs, remoteheads, remote=remote):
1176 def doit(pushedrevs, remoteheads, remote=remote):
1177 nodes = None
1177 nodes = None
1178 if pushedrevs:
1178 if pushedrevs:
1179 revs = logcmdutil.revrange(repo, pushedrevs)
1179 revs = logcmdutil.revrange(repo, pushedrevs)
1180 nodes = [repo[r].node() for r in revs]
1180 nodes = [repo[r].node() for r in revs]
1181 common, any, hds = setdiscovery.findcommonheads(
1181 common, any, hds = setdiscovery.findcommonheads(
1182 ui, repo, remote, ancestorsof=nodes, audit=data
1182 ui, repo, remote, ancestorsof=nodes, audit=data
1183 )
1183 )
1184 return common, hds
1184 return common, hds
1185
1185
1186 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1186 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1187 localrevs = opts[b'rev']
1187 localrevs = opts[b'rev']
1188
1188
1189 fm = ui.formatter(b'debugdiscovery', opts)
1189 fm = ui.formatter(b'debugdiscovery', opts)
1190 if fm.strict_format:
1190 if fm.strict_format:
1191
1191
1192 @contextlib.contextmanager
1192 @contextlib.contextmanager
1193 def may_capture_output():
1193 def may_capture_output():
1194 ui.pushbuffer()
1194 ui.pushbuffer()
1195 yield
1195 yield
1196 data[b'output'] = ui.popbuffer()
1196 data[b'output'] = ui.popbuffer()
1197
1197
1198 else:
1198 else:
1199 may_capture_output = util.nullcontextmanager
1199 may_capture_output = util.nullcontextmanager
1200 with may_capture_output():
1200 with may_capture_output():
1201 with util.timedcm('debug-discovery') as t:
1201 with util.timedcm('debug-discovery') as t:
1202 common, hds = doit(localrevs, remoterevs)
1202 common, hds = doit(localrevs, remoterevs)
1203
1203
1204 # compute all statistics
1204 # compute all statistics
1205 heads_common = set(common)
1205 heads_common = set(common)
1206 heads_remote = set(hds)
1206 heads_remote = set(hds)
1207 heads_local = set(repo.heads())
1207 heads_local = set(repo.heads())
1208 # note: they cannot be a local or remote head that is in common and not
1208 # note: they cannot be a local or remote head that is in common and not
1209 # itself a head of common.
1209 # itself a head of common.
1210 heads_common_local = heads_common & heads_local
1210 heads_common_local = heads_common & heads_local
1211 heads_common_remote = heads_common & heads_remote
1211 heads_common_remote = heads_common & heads_remote
1212 heads_common_both = heads_common & heads_remote & heads_local
1212 heads_common_both = heads_common & heads_remote & heads_local
1213
1213
1214 all = repo.revs(b'all()')
1214 all = repo.revs(b'all()')
1215 common = repo.revs(b'::%ln', common)
1215 common = repo.revs(b'::%ln', common)
1216 roots_common = repo.revs(b'roots(::%ld)', common)
1216 roots_common = repo.revs(b'roots(::%ld)', common)
1217 missing = repo.revs(b'not ::%ld', common)
1217 missing = repo.revs(b'not ::%ld', common)
1218 heads_missing = repo.revs(b'heads(%ld)', missing)
1218 heads_missing = repo.revs(b'heads(%ld)', missing)
1219 roots_missing = repo.revs(b'roots(%ld)', missing)
1219 roots_missing = repo.revs(b'roots(%ld)', missing)
1220 assert len(common) + len(missing) == len(all)
1220 assert len(common) + len(missing) == len(all)
1221
1221
1222 initial_undecided = repo.revs(
1222 initial_undecided = repo.revs(
1223 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1223 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1224 )
1224 )
1225 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1225 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1226 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1226 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1227 common_initial_undecided = initial_undecided & common
1227 common_initial_undecided = initial_undecided & common
1228 missing_initial_undecided = initial_undecided & missing
1228 missing_initial_undecided = initial_undecided & missing
1229
1229
1230 data[b'elapsed'] = t.elapsed
1230 data[b'elapsed'] = t.elapsed
1231 data[b'nb-common-heads'] = len(heads_common)
1231 data[b'nb-common-heads'] = len(heads_common)
1232 data[b'nb-common-heads-local'] = len(heads_common_local)
1232 data[b'nb-common-heads-local'] = len(heads_common_local)
1233 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1233 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1234 data[b'nb-common-heads-both'] = len(heads_common_both)
1234 data[b'nb-common-heads-both'] = len(heads_common_both)
1235 data[b'nb-common-roots'] = len(roots_common)
1235 data[b'nb-common-roots'] = len(roots_common)
1236 data[b'nb-head-local'] = len(heads_local)
1236 data[b'nb-head-local'] = len(heads_local)
1237 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1237 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1238 data[b'nb-head-remote'] = len(heads_remote)
1238 data[b'nb-head-remote'] = len(heads_remote)
1239 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1239 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1240 heads_common_remote
1240 heads_common_remote
1241 )
1241 )
1242 data[b'nb-revs'] = len(all)
1242 data[b'nb-revs'] = len(all)
1243 data[b'nb-revs-common'] = len(common)
1243 data[b'nb-revs-common'] = len(common)
1244 data[b'nb-revs-missing'] = len(missing)
1244 data[b'nb-revs-missing'] = len(missing)
1245 data[b'nb-missing-heads'] = len(heads_missing)
1245 data[b'nb-missing-heads'] = len(heads_missing)
1246 data[b'nb-missing-roots'] = len(roots_missing)
1246 data[b'nb-missing-roots'] = len(roots_missing)
1247 data[b'nb-ini_und'] = len(initial_undecided)
1247 data[b'nb-ini_und'] = len(initial_undecided)
1248 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1248 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1249 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1249 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1250 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1250 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1251 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1251 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1252
1252
1253 fm.startitem()
1253 fm.startitem()
1254 fm.data(**pycompat.strkwargs(data))
1254 fm.data(**pycompat.strkwargs(data))
1255 # display discovery summary
1255 # display discovery summary
1256 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1256 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1257 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1257 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1258 fm.plain(b"queries: %(total-queries)9d\n" % data)
1258 fm.plain(b"queries: %(total-queries)9d\n" % data)
1259 fm.plain(b"heads summary:\n")
1259 fm.plain(b"heads summary:\n")
1260 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1260 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1261 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1261 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1262 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1262 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1263 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1263 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1264 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1264 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1265 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1265 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1266 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1266 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1267 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1267 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1268 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1268 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1269 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1269 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1270 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1270 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1271 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1271 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1272 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1272 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1273 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1273 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1274 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1274 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1275 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1275 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1276 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1276 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1277 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1277 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1278 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1278 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1279 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1279 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1280 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1280 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1281 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1281 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1282
1282
1283 if ui.verbose:
1283 if ui.verbose:
1284 fm.plain(
1284 fm.plain(
1285 b"common heads: %s\n"
1285 b"common heads: %s\n"
1286 % b" ".join(sorted(short(n) for n in heads_common))
1286 % b" ".join(sorted(short(n) for n in heads_common))
1287 )
1287 )
1288 fm.end()
1288 fm.end()
1289
1289
1290
1290
1291 _chunksize = 4 << 10
1291 _chunksize = 4 << 10
1292
1292
1293
1293
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The resource is fetched through Mercurial's URL handling (so proxy,
    auth and scheme configuration apply) and streamed in _chunksize
    pieces either to the ui (default) or to the file named by --output.
    """
    fh = urlmod.open(ui, url, output)

    dest = ui
    if output:
        dest = open(output, b"wb", _chunksize)
    try:
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        # close the remote handle as well as the local file to avoid
        # leaking the connection/descriptor on early errors
        fh.close()
        if output:
            dest.close()
1316
1316
1317
1317
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions

    For each enabled extension this prints the name and, with --verbose,
    the source location, whether it is bundled with Mercurial, the
    versions it was tested with and its bug-reporting link.
    '''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            # frozen (PyOxidizer) builds embed extensions in the executable
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            fm.write(b'name', b'%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b' location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b' tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b' bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1379
1379
1380
1380
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification

    The expression is run through the parse/analyze/optimize pipeline
    (each stage can be dumped with --show-stage), turned into a matcher
    and then applied to the candidate file list, printing every match.
    '''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)

    # the three stages an expression tree goes through, in order
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # collect the candidate file names the matcher will be tested against
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1476
1476
1477
1477
@command(
    b"debug-repair-issue6528",
    [
        (
            b'',
            b'to-report',
            b'',
            _(b'build a report of affected revisions to this file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'from-report',
            b'',
            _(b'repair revisions listed in this report file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'paranoid',
            False,
            _(b'check that both detection methods do the same thing'),
        ),
    ]
    + cmdutil.dryrunopts,
)
def debug_repair_issue6528(ui, repo, **opts):
    """find affected revisions and repair them. See issue6528 for more details.

    The `--to-report` and `--from-report` flags allow you to cache and reuse the
    computation of affected revisions for a given repository across clones.
    The report format is line-based (with empty lines ignored):

    ```
    <ascii-hex of the affected revision>,... <unencoded filelog index filename>
    ```

    There can be multiple broken revisions per filelog, they are separated by
    a comma with no spaces. The only space is between the revision(s) and the
    filename.

    Note that this does *not* mean that this repairs future affected revisions,
    that needs a separate fix at the exchange level that was introduced in
    Mercurial 5.9.1.

    There is a `--paranoid` flag to test that the fast implementation is correct
    by checking it against the slow implementation. Since this matter is quite
    urgent and testing every edge-case is probably quite costly, we use this
    method to test on large repositories as a fuzzing method of sorts.
    """
    # --to-report only computes; it cannot be combined with repairing flags
    cmdutil.check_incompatible_arguments(
        opts, 'to_report', ['from_report', 'dry_run']
    )
    dry_run = opts.get('dry_run')
    to_report = opts.get('to_report')
    from_report = opts.get('from_report')
    paranoid = opts.get('paranoid')
    # TODO maybe add filelog pattern and revision pattern parameters to help
    # narrow down the search for users that know what they're looking for?

    if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
        msg = b"can only repair revlogv1 repositories, v2 is not affected"
        raise error.Abort(_(msg))

    rewrite.repair_issue6528(
        ui,
        repo,
        dry_run=dry_run,
        to_report=to_report,
        from_report=from_report,
        paranoid=paranoid,
    )
1550
1550
1551
1551
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # column width: longest variant name, but at least the header width
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # pad the "name:" column so values line up
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # bytes-like values pass through; booleans become yes/no
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        formatvalue = pycompat.identity

    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # pick labels so mismatches between repo, config and default
        # can be colorized differently
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1622
1622
1623
1623
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem

    Reports mount point, filesystem type and whether the filesystem
    supports exec bits, symlinks, hardlinks and case sensitivity.
    """
    ui.writenoi18n(b'path: %s\n' % path)
    ui.writenoi18n(
        b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
    )
    ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(
        b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
    )
    ui.writenoi18n(
        b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
    )
    casesensitive = b'(unknown)'
    try:
        # probe with a throwaway temp file; may fail on read-only paths,
        # in which case we keep reporting "(unknown)"
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1646
1646
1647
1647
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")

    # assemble the getbundle() keyword arguments from the command line
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = repo.getbundle(b'debug', **args)

    # map the user-facing compression name to the on-disk bundle type
    known_types = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    requested = opts.get(b'type', b'bzip2').lower()
    bundletype = known_types.get(requested)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1694
1694
1695
1695
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        # uipathfn renders paths the way the user expects them on output
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != b'.':
                # check the path itself first; if it does not match, walk
                # up its parent directories looking for an ignored ancestor
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    # an ancestor directory matched, not the file itself
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                # report which ignore file and line produced the match
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1744
1744
1745
1745
@command(
    b'debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    # full hashes in debug mode, abbreviated otherwise
    shortfn = hex if ui.debugflag else short

    # measure the node-id column width from the first revision; 12 is the
    # fallback when the store is empty
    idlen = 12
    for rev in store:
        idlen = len(shortfn(store.node(rev)))
        break

    fm = ui.formatter(b'debugindex', opts)
    header = b' rev linkrev %s %s p2\n' % (
        b'nodeid'.ljust(idlen),
        b'p1'.ljust(idlen),
    )
    fm.plain(header)

    for rev in store:
        node = store.node(rev)
        parents = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
        fm.write(b'node', b'%s ', shortfn(node))
        fm.write(b'p1', b'%s ', shortfn(parents[0]))
        fm.write(b'p2', b'%s', shortfn(parents[1]))
        fm.plain(b'\n')

    fm.end()
1785
1785
1786
1786
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for rev in store:
        parents = store.parents(store.node(rev))
        # one edge per real parent; the second parent is only emitted
        # when it is not the null node
        ui.write(b"\t%d -> %d\n" % (store.rev(parents[0]), rev))
        if parents[1] != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (store.rev(parents[1]), rev))
    ui.write(b"}\n")
1805
1805
1806
1806
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # force the index to be fully loaded/used before asking for stats
    repo.changelog.shortest(repo.nullid, 1)
    index = repo.changelog.index
    # only the C/Rust index implementations expose a stats() method
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    stats = index.stats()
    for key in sorted(stats):
        ui.write(b'%s: %d\n' % (key, stats[key]))
1816
1816
1817
1817
@command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
def debuginstall(ui, **opts):
    """test Mercurial installation

    Runs a battery of sanity checks (encoding, Python, compiled modules,
    compression engines, templates, editor, username) and prints one line
    per check.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)

    # count of detected problems; doubles as the exit code
    problems = 0

    fm = ui.formatter(b'debuginstall', opts)
    fm.startitem()

    # encoding might be unknown or wrong. don't translate these messages.
    fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
    err = None
    try:
        # verify the configured encoding is one Python actually knows about
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(
        err,
        b'encodingerror',
        b" %s\n (check that your locale is properly set)\n",
        err,
    )

    # Python
    pythonlib = None
    if util.safehasattr(os, '__file__'):
        pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
    elif getattr(sys, 'oxidized', False):
        # PyOxidizer builds embed the stdlib inside the executable
        pythonlib = pycompat.sysexecutable

    fm.write(
        b'pythonexe',
        _(b"checking Python executable (%s)\n"),
        pycompat.sysexecutable or _(b"unknown"),
    )
    fm.write(
        b'pythonimplementation',
        _(b"checking Python implementation (%s)\n"),
        pycompat.sysbytes(platform.python_implementation()),
    )
    fm.write(
        b'pythonver',
        _(b"checking Python version (%s)\n"),
        (b"%d.%d.%d" % sys.version_info[:3]),
    )
    fm.write(
        b'pythonlib',
        _(b"checking Python lib (%s)...\n"),
        pythonlib or _(b"unknown"),
    )

    try:
        from . import rustext  # pytype: disable=import-error

        rustext.__doc__  # trigger lazy import
    except ImportError:
        rustext = None

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add(b'sni')

    fm.write(
        b'pythonsecurity',
        _(b"checking Python security support (%s)\n"),
        fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
    )

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if b'tls1.2' not in security:
        fm.plain(
            _(
                b' TLS 1.2 not supported by Python install; '
                b'network connections lack modern security\n'
            )
        )
    if b'sni' not in security:
        fm.plain(
            _(
                b' SNI not supported by Python install; may have '
                b'connectivity issues with some servers\n'
            )
        )

    fm.plain(
        _(
            b"checking Rust extensions (%s)\n"
            % (b'missing' if rustext is None else b'installed')
        ),
    )

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write(
        b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
    )
    fm.write(
        b'hgverextra',
        _(b"checking Mercurial custom build (%s)\n"),
        b'+'.join(hgver.split(b'+')[1:]),
    )

    # compiled modules
    hgmodules = None
    if util.safehasattr(sys.modules[__name__], '__file__'):
        hgmodules = os.path.dirname(pycompat.fsencode(__file__))
    elif getattr(sys, 'oxidized', False):
        hgmodules = pycompat.sysexecutable

    fm.write(
        b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
    )
    fm.write(
        b'hgmodules',
        _(b"checking installed modules (%s)...\n"),
        hgmodules or _(b"unknown"),
    )

    rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
    rustext = rustandc  # for now, that's the only case
    cext = policy.policy in (b'c', b'allow') or rustandc
    nopure = cext or rustext
    if nopure:
        # importing the accelerated modules is the check itself: any
        # failure means the install is broken for this module policy
        err = None
        try:
            if cext:
                from .cext import (  # pytype: disable=import-error
                    base85,
                    bdiff,
                    mpatch,
                    osutil,
                )

                # quiet pyflakes
                dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
            if rustext:
                from .rustext import (  # pytype: disable=import-error
                    ancestor,
                    dirstate,
                )

                dir(ancestor), dir(dirstate)  # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, b'extensionserror', b" %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write(
        b'compengines',
        _(b'checking registered compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    fm.write(
        b'compenginesavail',
        _(b'checking available compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines if e.available()),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    wirecompengines = compression.compengines.supportedwireengines(
        compression.SERVERROLE
    )
    fm.write(
        b'compenginesserver',
        _(
            b'checking available compression engines '
            b'for wire protocol (%s)\n'
        ),
        fm.formatlist(
            [e.name() for e in wirecompengines if e.wireprotosupport()],
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    re2 = b'missing'
    if util._re2:
        re2 = b'available'
    fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatedir()
    fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
    fm.condwrite(not p, b'', _(b" no template directories found\n"))
    if p:
        (m, fp) = templater.try_open_template(b"map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                p = None
            fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
        else:
            p = None
        fm.condwrite(
            p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
        )
        fm.condwrite(
            not m,
            b'defaulttemplatenotfound',
            _(b" template '%s' not found\n"),
            b"default",
        )
    if not p:
        problems += 1
    fm.condwrite(
        not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
    )

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    # the editor setting may be a full shell command; only the binary
    # (first word) is checked against PATH
    editorbin = procutil.shellsplit(editor)[0]
    fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    fm.condwrite(
        not cmdpath and editor == b'vi',
        b'vinotfound',
        _(
            b" No commit editor set and can't find %s in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editor == b'vi' and editorbin,
    )
    fm.condwrite(
        not cmdpath and editor != b'vi',
        b'editornotfound',
        _(
            b" Can't find editor '%s' in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editorbin,
    )
    # a missing 'vi' default is only a warning; an explicitly configured
    # editor that cannot be found is a real problem
    if not cmdpath and editor != b'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e.message
        problems += 1

    fm.condwrite(
        username, b'username', _(b"checking username (%s)\n"), username
    )
    fm.condwrite(
        err,
        b'usernameerror',
        _(
            b"checking username...\n %s\n"
            b" (specify a username in your configuration file)\n"
        ),
        err,
    )

    # let loaded extensions contribute their own install checks
    for name, mod in extensions.extensions():
        handler = getattr(mod, 'debuginstall', None)
        if handler is not None:
            problems += handler(ui, fm)

    fm.condwrite(not problems, b'', _(b"no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(
        problems,
        b'problems',
        _(b"%d problems detected, please check your install!\n"),
        problems,
    )
    fm.end()

    return problems
2116
2116
2117
2117
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    flags = repo.known([bin(s) for s in ids])
    # render the per-node answers as a compact 0/1 string
    digits = [b"1" if known else b"0" for known in flags]
    ui.write(b"%s\n" % b"".join(digits))
2131
2131
2132
2132
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # thin alias kept so ancient completion scripts keep working; all real
    # work happens in debugnamecomplete
    debugnamecomplete(ui, repo, *args)
2137
2137
2138
2138
2139 @command(
2139 @command(
2140 b'debuglocks',
2140 b'debuglocks',
2141 [
2141 [
2142 (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
2142 (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
2143 (
2143 (
2144 b'W',
2144 b'W',
2145 b'force-free-wlock',
2145 b'force-free-wlock',
2146 None,
2146 None,
2147 _(b'free the working state lock (DANGEROUS)'),
2147 _(b'free the working state lock (DANGEROUS)'),
2148 ),
2148 ),
2149 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
2149 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
2150 (
2150 (
2151 b'S',
2151 b'S',
2152 b'set-wlock',
2152 b'set-wlock',
2153 None,
2153 None,
2154 _(b'set the working state lock until stopped'),
2154 _(b'set the working state lock until stopped'),
2155 ),
2155 ),
2156 ],
2156 ],
2157 _(b'[OPTION]...'),
2157 _(b'[OPTION]...'),
2158 )
2158 )
2159 def debuglocks(ui, repo, **opts):
2159 def debuglocks(ui, repo, **opts):
2160 """show or modify state of locks
2160 """show or modify state of locks
2161
2161
2162 By default, this command will show which locks are held. This
2162 By default, this command will show which locks are held. This
2163 includes the user and process holding the lock, the amount of time
2163 includes the user and process holding the lock, the amount of time
2164 the lock has been held, and the machine name where the process is
2164 the lock has been held, and the machine name where the process is
2165 running if it's not local.
2165 running if it's not local.
2166
2166
2167 Locks protect the integrity of Mercurial's data, so should be
2167 Locks protect the integrity of Mercurial's data, so should be
2168 treated with care. System crashes or other interruptions may cause
2168 treated with care. System crashes or other interruptions may cause
2169 locks to not be properly released, though Mercurial will usually
2169 locks to not be properly released, though Mercurial will usually
2170 detect and remove such stale locks automatically.
2170 detect and remove such stale locks automatically.
2171
2171
2172 However, detecting stale locks may not always be possible (for
2172 However, detecting stale locks may not always be possible (for
2173 instance, on a shared filesystem). Removing locks may also be
2173 instance, on a shared filesystem). Removing locks may also be
2174 blocked by filesystem permissions.
2174 blocked by filesystem permissions.
2175
2175
2176 Setting a lock will prevent other commands from changing the data.
2176 Setting a lock will prevent other commands from changing the data.
2177 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
2177 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
2178 The set locks are removed when the command exits.
2178 The set locks are removed when the command exits.
2179
2179
2180 Returns 0 if no locks are held.
2180 Returns 0 if no locks are held.
2181
2181
2182 """
2182 """
2183
2183
2184 if opts.get('force_free_lock'):
2184 if opts.get('force_free_lock'):
2185 repo.svfs.tryunlink(b'lock')
2185 repo.svfs.tryunlink(b'lock')
2186 if opts.get('force_free_wlock'):
2186 if opts.get('force_free_wlock'):
2187 repo.vfs.tryunlink(b'wlock')
2187 repo.vfs.tryunlink(b'wlock')
2188 if opts.get('force_free_lock') or opts.get('force_free_wlock'):
2188 if opts.get('force_free_lock') or opts.get('force_free_wlock'):
2189 return 0
2189 return 0
2190
2190
2191 locks = []
2191 locks = []
2192 try:
2192 try:
2193 if opts.get('set_wlock'):
2193 if opts.get('set_wlock'):
2194 try:
2194 try:
2195 locks.append(repo.wlock(False))
2195 locks.append(repo.wlock(False))
2196 except error.LockHeld:
2196 except error.LockHeld:
2197 raise error.Abort(_(b'wlock is already held'))
2197 raise error.Abort(_(b'wlock is already held'))
2198 if opts.get('set_lock'):
2198 if opts.get('set_lock'):
2199 try:
2199 try:
2200 locks.append(repo.lock(False))
2200 locks.append(repo.lock(False))
2201 except error.LockHeld:
2201 except error.LockHeld:
2202 raise error.Abort(_(b'lock is already held'))
2202 raise error.Abort(_(b'lock is already held'))
2203 if len(locks):
2203 if len(locks):
2204 ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
2204 try:
2205 if ui.interactive():
2206 prompt = _(b"ready to release the lock (y)? $$ &Yes")
2207 ui.promptchoice(prompt)
2208 else:
2209 msg = b"%d locks held, waiting for signal\n"
2210 msg %= len(locks)
2211 ui.status(msg)
2212 while True: # XXX wait for a signal
2213 time.sleep(0.1)
2214 except KeyboardInterrupt:
2215 msg = b"signal-received releasing locks\n"
2216 ui.status(msg)
2205 return 0
2217 return 0
2206 finally:
2218 finally:
2207 release(*locks)
2219 release(*locks)
2208
2220
2209 now = time.time()
2221 now = time.time()
2210 held = 0
2222 held = 0
2211
2223
2212 def report(vfs, name, method):
2224 def report(vfs, name, method):
2213 # this causes stale locks to get reaped for more accurate reporting
2225 # this causes stale locks to get reaped for more accurate reporting
2214 try:
2226 try:
2215 l = method(False)
2227 l = method(False)
2216 except error.LockHeld:
2228 except error.LockHeld:
2217 l = None
2229 l = None
2218
2230
2219 if l:
2231 if l:
2220 l.release()
2232 l.release()
2221 else:
2233 else:
2222 try:
2234 try:
2223 st = vfs.lstat(name)
2235 st = vfs.lstat(name)
2224 age = now - st[stat.ST_MTIME]
2236 age = now - st[stat.ST_MTIME]
2225 user = util.username(st.st_uid)
2237 user = util.username(st.st_uid)
2226 locker = vfs.readlock(name)
2238 locker = vfs.readlock(name)
2227 if b":" in locker:
2239 if b":" in locker:
2228 host, pid = locker.split(b':')
2240 host, pid = locker.split(b':')
2229 if host == socket.gethostname():
2241 if host == socket.gethostname():
2230 locker = b'user %s, process %s' % (user or b'None', pid)
2242 locker = b'user %s, process %s' % (user or b'None', pid)
2231 else:
2243 else:
2232 locker = b'user %s, process %s, host %s' % (
2244 locker = b'user %s, process %s, host %s' % (
2233 user or b'None',
2245 user or b'None',
2234 pid,
2246 pid,
2235 host,
2247 host,
2236 )
2248 )
2237 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
2249 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
2238 return 1
2250 return 1
2239 except OSError as e:
2251 except OSError as e:
2240 if e.errno != errno.ENOENT:
2252 if e.errno != errno.ENOENT:
2241 raise
2253 raise
2242
2254
2243 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
2255 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
2244 return 0
2256 return 0
2245
2257
2246 held += report(repo.svfs, b"lock", repo.lock)
2258 held += report(repo.svfs, b"lock", repo.lock)
2247 held += report(repo.vfs, b"wlock", repo.wlock)
2259 held += report(repo.vfs, b"wlock", repo.wlock)
2248
2260
2249 return held
2261 return held
2250
2262
2251
2263
2252 @command(
2264 @command(
2253 b'debugmanifestfulltextcache',
2265 b'debugmanifestfulltextcache',
2254 [
2266 [
2255 (b'', b'clear', False, _(b'clear the cache')),
2267 (b'', b'clear', False, _(b'clear the cache')),
2256 (
2268 (
2257 b'a',
2269 b'a',
2258 b'add',
2270 b'add',
2259 [],
2271 [],
2260 _(b'add the given manifest nodes to the cache'),
2272 _(b'add the given manifest nodes to the cache'),
2261 _(b'NODE'),
2273 _(b'NODE'),
2262 ),
2274 ),
2263 ],
2275 ],
2264 b'',
2276 b'',
2265 )
2277 )
2266 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
2278 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
2267 """show, clear or amend the contents of the manifest fulltext cache"""
2279 """show, clear or amend the contents of the manifest fulltext cache"""
2268
2280
2269 def getcache():
2281 def getcache():
2270 r = repo.manifestlog.getstorage(b'')
2282 r = repo.manifestlog.getstorage(b'')
2271 try:
2283 try:
2272 return r._fulltextcache
2284 return r._fulltextcache
2273 except AttributeError:
2285 except AttributeError:
2274 msg = _(
2286 msg = _(
2275 b"Current revlog implementation doesn't appear to have a "
2287 b"Current revlog implementation doesn't appear to have a "
2276 b"manifest fulltext cache\n"
2288 b"manifest fulltext cache\n"
2277 )
2289 )
2278 raise error.Abort(msg)
2290 raise error.Abort(msg)
2279
2291
2280 if opts.get('clear'):
2292 if opts.get('clear'):
2281 with repo.wlock():
2293 with repo.wlock():
2282 cache = getcache()
2294 cache = getcache()
2283 cache.clear(clear_persisted_data=True)
2295 cache.clear(clear_persisted_data=True)
2284 return
2296 return
2285
2297
2286 if add:
2298 if add:
2287 with repo.wlock():
2299 with repo.wlock():
2288 m = repo.manifestlog
2300 m = repo.manifestlog
2289 store = m.getstorage(b'')
2301 store = m.getstorage(b'')
2290 for n in add:
2302 for n in add:
2291 try:
2303 try:
2292 manifest = m[store.lookup(n)]
2304 manifest = m[store.lookup(n)]
2293 except error.LookupError as e:
2305 except error.LookupError as e:
2294 raise error.Abort(
2306 raise error.Abort(
2295 bytes(e), hint=b"Check your manifest node id"
2307 bytes(e), hint=b"Check your manifest node id"
2296 )
2308 )
2297 manifest.read() # stores revisision in cache too
2309 manifest.read() # stores revisision in cache too
2298 return
2310 return
2299
2311
2300 cache = getcache()
2312 cache = getcache()
2301 if not len(cache):
2313 if not len(cache):
2302 ui.write(_(b'cache empty\n'))
2314 ui.write(_(b'cache empty\n'))
2303 else:
2315 else:
2304 ui.write(
2316 ui.write(
2305 _(
2317 _(
2306 b'cache contains %d manifest entries, in order of most to '
2318 b'cache contains %d manifest entries, in order of most to '
2307 b'least recent:\n'
2319 b'least recent:\n'
2308 )
2320 )
2309 % (len(cache),)
2321 % (len(cache),)
2310 )
2322 )
2311 totalsize = 0
2323 totalsize = 0
2312 for nodeid in cache:
2324 for nodeid in cache:
2313 # Use cache.get to not update the LRU order
2325 # Use cache.get to not update the LRU order
2314 data = cache.peek(nodeid)
2326 data = cache.peek(nodeid)
2315 size = len(data)
2327 size = len(data)
2316 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
2328 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
2317 ui.write(
2329 ui.write(
2318 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
2330 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
2319 )
2331 )
2320 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
2332 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
2321 ui.write(
2333 ui.write(
2322 _(b'total cache data size %s, on-disk %s\n')
2334 _(b'total cache data size %s, on-disk %s\n')
2323 % (util.bytecount(totalsize), util.bytecount(ondisk))
2335 % (util.bytecount(totalsize), util.bytecount(ondisk))
2324 )
2336 )
2325
2337
2326
2338
2327 @command(b'debugmergestate', [] + cmdutil.templateopts, b'')
2339 @command(b'debugmergestate', [] + cmdutil.templateopts, b'')
2328 def debugmergestate(ui, repo, *args, **opts):
2340 def debugmergestate(ui, repo, *args, **opts):
2329 """print merge state
2341 """print merge state
2330
2342
2331 Use --verbose to print out information about whether v1 or v2 merge state
2343 Use --verbose to print out information about whether v1 or v2 merge state
2332 was chosen."""
2344 was chosen."""
2333
2345
2334 if ui.verbose:
2346 if ui.verbose:
2335 ms = mergestatemod.mergestate(repo)
2347 ms = mergestatemod.mergestate(repo)
2336
2348
2337 # sort so that reasonable information is on top
2349 # sort so that reasonable information is on top
2338 v1records = ms._readrecordsv1()
2350 v1records = ms._readrecordsv1()
2339 v2records = ms._readrecordsv2()
2351 v2records = ms._readrecordsv2()
2340
2352
2341 if not v1records and not v2records:
2353 if not v1records and not v2records:
2342 pass
2354 pass
2343 elif not v2records:
2355 elif not v2records:
2344 ui.writenoi18n(b'no version 2 merge state\n')
2356 ui.writenoi18n(b'no version 2 merge state\n')
2345 elif ms._v1v2match(v1records, v2records):
2357 elif ms._v1v2match(v1records, v2records):
2346 ui.writenoi18n(b'v1 and v2 states match: using v2\n')
2358 ui.writenoi18n(b'v1 and v2 states match: using v2\n')
2347 else:
2359 else:
2348 ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')
2360 ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')
2349
2361
2350 opts = pycompat.byteskwargs(opts)
2362 opts = pycompat.byteskwargs(opts)
2351 if not opts[b'template']:
2363 if not opts[b'template']:
2352 opts[b'template'] = (
2364 opts[b'template'] = (
2353 b'{if(commits, "", "no merge state found\n")}'
2365 b'{if(commits, "", "no merge state found\n")}'
2354 b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
2366 b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
2355 b'{files % "file: {path} (state \\"{state}\\")\n'
2367 b'{files % "file: {path} (state \\"{state}\\")\n'
2356 b'{if(local_path, "'
2368 b'{if(local_path, "'
2357 b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
2369 b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
2358 b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
2370 b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
2359 b' other path: {other_path} (node {other_node})\n'
2371 b' other path: {other_path} (node {other_node})\n'
2360 b'")}'
2372 b'")}'
2361 b'{if(rename_side, "'
2373 b'{if(rename_side, "'
2362 b' rename side: {rename_side}\n'
2374 b' rename side: {rename_side}\n'
2363 b' renamed path: {renamed_path}\n'
2375 b' renamed path: {renamed_path}\n'
2364 b'")}'
2376 b'")}'
2365 b'{extras % " extra: {key} = {value}\n"}'
2377 b'{extras % " extra: {key} = {value}\n"}'
2366 b'"}'
2378 b'"}'
2367 b'{extras % "extra: {file} ({key} = {value})\n"}'
2379 b'{extras % "extra: {file} ({key} = {value})\n"}'
2368 )
2380 )
2369
2381
2370 ms = mergestatemod.mergestate.read(repo)
2382 ms = mergestatemod.mergestate.read(repo)
2371
2383
2372 fm = ui.formatter(b'debugmergestate', opts)
2384 fm = ui.formatter(b'debugmergestate', opts)
2373 fm.startitem()
2385 fm.startitem()
2374
2386
2375 fm_commits = fm.nested(b'commits')
2387 fm_commits = fm.nested(b'commits')
2376 if ms.active():
2388 if ms.active():
2377 for name, node, label_index in (
2389 for name, node, label_index in (
2378 (b'local', ms.local, 0),
2390 (b'local', ms.local, 0),
2379 (b'other', ms.other, 1),
2391 (b'other', ms.other, 1),
2380 ):
2392 ):
2381 fm_commits.startitem()
2393 fm_commits.startitem()
2382 fm_commits.data(name=name)
2394 fm_commits.data(name=name)
2383 fm_commits.data(node=hex(node))
2395 fm_commits.data(node=hex(node))
2384 if ms._labels and len(ms._labels) > label_index:
2396 if ms._labels and len(ms._labels) > label_index:
2385 fm_commits.data(label=ms._labels[label_index])
2397 fm_commits.data(label=ms._labels[label_index])
2386 fm_commits.end()
2398 fm_commits.end()
2387
2399
2388 fm_files = fm.nested(b'files')
2400 fm_files = fm.nested(b'files')
2389 if ms.active():
2401 if ms.active():
2390 for f in ms:
2402 for f in ms:
2391 fm_files.startitem()
2403 fm_files.startitem()
2392 fm_files.data(path=f)
2404 fm_files.data(path=f)
2393 state = ms._state[f]
2405 state = ms._state[f]
2394 fm_files.data(state=state[0])
2406 fm_files.data(state=state[0])
2395 if state[0] in (
2407 if state[0] in (
2396 mergestatemod.MERGE_RECORD_UNRESOLVED,
2408 mergestatemod.MERGE_RECORD_UNRESOLVED,
2397 mergestatemod.MERGE_RECORD_RESOLVED,
2409 mergestatemod.MERGE_RECORD_RESOLVED,
2398 ):
2410 ):
2399 fm_files.data(local_key=state[1])
2411 fm_files.data(local_key=state[1])
2400 fm_files.data(local_path=state[2])
2412 fm_files.data(local_path=state[2])
2401 fm_files.data(ancestor_path=state[3])
2413 fm_files.data(ancestor_path=state[3])
2402 fm_files.data(ancestor_node=state[4])
2414 fm_files.data(ancestor_node=state[4])
2403 fm_files.data(other_path=state[5])
2415 fm_files.data(other_path=state[5])
2404 fm_files.data(other_node=state[6])
2416 fm_files.data(other_node=state[6])
2405 fm_files.data(local_flags=state[7])
2417 fm_files.data(local_flags=state[7])
2406 elif state[0] in (
2418 elif state[0] in (
2407 mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
2419 mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
2408 mergestatemod.MERGE_RECORD_RESOLVED_PATH,
2420 mergestatemod.MERGE_RECORD_RESOLVED_PATH,
2409 ):
2421 ):
2410 fm_files.data(renamed_path=state[1])
2422 fm_files.data(renamed_path=state[1])
2411 fm_files.data(rename_side=state[2])
2423 fm_files.data(rename_side=state[2])
2412 fm_extras = fm_files.nested(b'extras')
2424 fm_extras = fm_files.nested(b'extras')
2413 for k, v in sorted(ms.extras(f).items()):
2425 for k, v in sorted(ms.extras(f).items()):
2414 fm_extras.startitem()
2426 fm_extras.startitem()
2415 fm_extras.data(key=k)
2427 fm_extras.data(key=k)
2416 fm_extras.data(value=v)
2428 fm_extras.data(value=v)
2417 fm_extras.end()
2429 fm_extras.end()
2418
2430
2419 fm_files.end()
2431 fm_files.end()
2420
2432
2421 fm_extras = fm.nested(b'extras')
2433 fm_extras = fm.nested(b'extras')
2422 for f, d in sorted(ms.allextras().items()):
2434 for f, d in sorted(ms.allextras().items()):
2423 if f in ms:
2435 if f in ms:
2424 # If file is in mergestate, we have already processed it's extras
2436 # If file is in mergestate, we have already processed it's extras
2425 continue
2437 continue
2426 for k, v in d.items():
2438 for k, v in d.items():
2427 fm_extras.startitem()
2439 fm_extras.startitem()
2428 fm_extras.data(file=f)
2440 fm_extras.data(file=f)
2429 fm_extras.data(key=k)
2441 fm_extras.data(key=k)
2430 fm_extras.data(value=v)
2442 fm_extras.data(value=v)
2431 fm_extras.end()
2443 fm_extras.end()
2432
2444
2433 fm.end()
2445 fm.end()
2434
2446
2435
2447
2436 @command(b'debugnamecomplete', [], _(b'NAME...'))
2448 @command(b'debugnamecomplete', [], _(b'NAME...'))
2437 def debugnamecomplete(ui, repo, *args):
2449 def debugnamecomplete(ui, repo, *args):
2438 '''complete "names" - tags, open branch names, bookmark names'''
2450 '''complete "names" - tags, open branch names, bookmark names'''
2439
2451
2440 names = set()
2452 names = set()
2441 # since we previously only listed open branches, we will handle that
2453 # since we previously only listed open branches, we will handle that
2442 # specially (after this for loop)
2454 # specially (after this for loop)
2443 for name, ns in repo.names.items():
2455 for name, ns in repo.names.items():
2444 if name != b'branches':
2456 if name != b'branches':
2445 names.update(ns.listnames(repo))
2457 names.update(ns.listnames(repo))
2446 names.update(
2458 names.update(
2447 tag
2459 tag
2448 for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
2460 for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
2449 if not closed
2461 if not closed
2450 )
2462 )
2451 completions = set()
2463 completions = set()
2452 if not args:
2464 if not args:
2453 args = [b'']
2465 args = [b'']
2454 for a in args:
2466 for a in args:
2455 completions.update(n for n in names if n.startswith(a))
2467 completions.update(n for n in names if n.startswith(a))
2456 ui.write(b'\n'.join(sorted(completions)))
2468 ui.write(b'\n'.join(sorted(completions)))
2457 ui.write(b'\n')
2469 ui.write(b'\n')
2458
2470
2459
2471
2460 @command(
2472 @command(
2461 b'debugnodemap',
2473 b'debugnodemap',
2462 [
2474 [
2463 (
2475 (
2464 b'',
2476 b'',
2465 b'dump-new',
2477 b'dump-new',
2466 False,
2478 False,
2467 _(b'write a (new) persistent binary nodemap on stdout'),
2479 _(b'write a (new) persistent binary nodemap on stdout'),
2468 ),
2480 ),
2469 (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
2481 (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
2470 (
2482 (
2471 b'',
2483 b'',
2472 b'check',
2484 b'check',
2473 False,
2485 False,
2474 _(b'check that the data on disk data are correct.'),
2486 _(b'check that the data on disk data are correct.'),
2475 ),
2487 ),
2476 (
2488 (
2477 b'',
2489 b'',
2478 b'metadata',
2490 b'metadata',
2479 False,
2491 False,
2480 _(b'display the on disk meta data for the nodemap'),
2492 _(b'display the on disk meta data for the nodemap'),
2481 ),
2493 ),
2482 ],
2494 ],
2483 )
2495 )
2484 def debugnodemap(ui, repo, **opts):
2496 def debugnodemap(ui, repo, **opts):
2485 """write and inspect on disk nodemap"""
2497 """write and inspect on disk nodemap"""
2486 if opts['dump_new']:
2498 if opts['dump_new']:
2487 unfi = repo.unfiltered()
2499 unfi = repo.unfiltered()
2488 cl = unfi.changelog
2500 cl = unfi.changelog
2489 if util.safehasattr(cl.index, "nodemap_data_all"):
2501 if util.safehasattr(cl.index, "nodemap_data_all"):
2490 data = cl.index.nodemap_data_all()
2502 data = cl.index.nodemap_data_all()
2491 else:
2503 else:
2492 data = nodemap.persistent_data(cl.index)
2504 data = nodemap.persistent_data(cl.index)
2493 ui.write(data)
2505 ui.write(data)
2494 elif opts['dump_disk']:
2506 elif opts['dump_disk']:
2495 unfi = repo.unfiltered()
2507 unfi = repo.unfiltered()
2496 cl = unfi.changelog
2508 cl = unfi.changelog
2497 nm_data = nodemap.persisted_data(cl)
2509 nm_data = nodemap.persisted_data(cl)
2498 if nm_data is not None:
2510 if nm_data is not None:
2499 docket, data = nm_data
2511 docket, data = nm_data
2500 ui.write(data[:])
2512 ui.write(data[:])
2501 elif opts['check']:
2513 elif opts['check']:
2502 unfi = repo.unfiltered()
2514 unfi = repo.unfiltered()
2503 cl = unfi.changelog
2515 cl = unfi.changelog
2504 nm_data = nodemap.persisted_data(cl)
2516 nm_data = nodemap.persisted_data(cl)
2505 if nm_data is not None:
2517 if nm_data is not None:
2506 docket, data = nm_data
2518 docket, data = nm_data
2507 return nodemap.check_data(ui, cl.index, data)
2519 return nodemap.check_data(ui, cl.index, data)
2508 elif opts['metadata']:
2520 elif opts['metadata']:
2509 unfi = repo.unfiltered()
2521 unfi = repo.unfiltered()
2510 cl = unfi.changelog
2522 cl = unfi.changelog
2511 nm_data = nodemap.persisted_data(cl)
2523 nm_data = nodemap.persisted_data(cl)
2512 if nm_data is not None:
2524 if nm_data is not None:
2513 docket, data = nm_data
2525 docket, data = nm_data
2514 ui.write((b"uid: %s\n") % docket.uid)
2526 ui.write((b"uid: %s\n") % docket.uid)
2515 ui.write((b"tip-rev: %d\n") % docket.tip_rev)
2527 ui.write((b"tip-rev: %d\n") % docket.tip_rev)
2516 ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
2528 ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
2517 ui.write((b"data-length: %d\n") % docket.data_length)
2529 ui.write((b"data-length: %d\n") % docket.data_length)
2518 ui.write((b"data-unused: %d\n") % docket.data_unused)
2530 ui.write((b"data-unused: %d\n") % docket.data_unused)
2519 unused_perc = docket.data_unused * 100.0 / docket.data_length
2531 unused_perc = docket.data_unused * 100.0 / docket.data_length
2520 ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2532 ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2521
2533
2522
2534
2523 @command(
2535 @command(
2524 b'debugobsolete',
2536 b'debugobsolete',
2525 [
2537 [
2526 (b'', b'flags', 0, _(b'markers flag')),
2538 (b'', b'flags', 0, _(b'markers flag')),
2527 (
2539 (
2528 b'',
2540 b'',
2529 b'record-parents',
2541 b'record-parents',
2530 False,
2542 False,
2531 _(b'record parent information for the precursor'),
2543 _(b'record parent information for the precursor'),
2532 ),
2544 ),
2533 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2545 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2534 (
2546 (
2535 b'',
2547 b'',
2536 b'exclusive',
2548 b'exclusive',
2537 False,
2549 False,
2538 _(b'restrict display to markers only relevant to REV'),
2550 _(b'restrict display to markers only relevant to REV'),
2539 ),
2551 ),
2540 (b'', b'index', False, _(b'display index of the marker')),
2552 (b'', b'index', False, _(b'display index of the marker')),
2541 (b'', b'delete', [], _(b'delete markers specified by indices')),
2553 (b'', b'delete', [], _(b'delete markers specified by indices')),
2542 ]
2554 ]
2543 + cmdutil.commitopts2
2555 + cmdutil.commitopts2
2544 + cmdutil.formatteropts,
2556 + cmdutil.formatteropts,
2545 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2557 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2546 )
2558 )
2547 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2559 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2548 """create arbitrary obsolete marker
2560 """create arbitrary obsolete marker
2549
2561
2550 With no arguments, displays the list of obsolescence markers."""
2562 With no arguments, displays the list of obsolescence markers."""
2551
2563
2552 opts = pycompat.byteskwargs(opts)
2564 opts = pycompat.byteskwargs(opts)
2553
2565
2554 def parsenodeid(s):
2566 def parsenodeid(s):
2555 try:
2567 try:
2556 # We do not use revsingle/revrange functions here to accept
2568 # We do not use revsingle/revrange functions here to accept
2557 # arbitrary node identifiers, possibly not present in the
2569 # arbitrary node identifiers, possibly not present in the
2558 # local repository.
2570 # local repository.
2559 n = bin(s)
2571 n = bin(s)
2560 if len(n) != repo.nodeconstants.nodelen:
2572 if len(n) != repo.nodeconstants.nodelen:
2561 raise TypeError()
2573 raise TypeError()
2562 return n
2574 return n
2563 except TypeError:
2575 except TypeError:
2564 raise error.InputError(
2576 raise error.InputError(
2565 b'changeset references must be full hexadecimal '
2577 b'changeset references must be full hexadecimal '
2566 b'node identifiers'
2578 b'node identifiers'
2567 )
2579 )
2568
2580
2569 if opts.get(b'delete'):
2581 if opts.get(b'delete'):
2570 indices = []
2582 indices = []
2571 for v in opts.get(b'delete'):
2583 for v in opts.get(b'delete'):
2572 try:
2584 try:
2573 indices.append(int(v))
2585 indices.append(int(v))
2574 except ValueError:
2586 except ValueError:
2575 raise error.InputError(
2587 raise error.InputError(
2576 _(b'invalid index value: %r') % v,
2588 _(b'invalid index value: %r') % v,
2577 hint=_(b'use integers for indices'),
2589 hint=_(b'use integers for indices'),
2578 )
2590 )
2579
2591
2580 if repo.currenttransaction():
2592 if repo.currenttransaction():
2581 raise error.Abort(
2593 raise error.Abort(
2582 _(b'cannot delete obsmarkers in the middle of transaction.')
2594 _(b'cannot delete obsmarkers in the middle of transaction.')
2583 )
2595 )
2584
2596
2585 with repo.lock():
2597 with repo.lock():
2586 n = repair.deleteobsmarkers(repo.obsstore, indices)
2598 n = repair.deleteobsmarkers(repo.obsstore, indices)
2587 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2599 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2588
2600
2589 return
2601 return
2590
2602
2591 if precursor is not None:
2603 if precursor is not None:
2592 if opts[b'rev']:
2604 if opts[b'rev']:
2593 raise error.InputError(
2605 raise error.InputError(
2594 b'cannot select revision when creating marker'
2606 b'cannot select revision when creating marker'
2595 )
2607 )
2596 metadata = {}
2608 metadata = {}
2597 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2609 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2598 succs = tuple(parsenodeid(succ) for succ in successors)
2610 succs = tuple(parsenodeid(succ) for succ in successors)
2599 l = repo.lock()
2611 l = repo.lock()
2600 try:
2612 try:
2601 tr = repo.transaction(b'debugobsolete')
2613 tr = repo.transaction(b'debugobsolete')
2602 try:
2614 try:
2603 date = opts.get(b'date')
2615 date = opts.get(b'date')
2604 if date:
2616 if date:
2605 date = dateutil.parsedate(date)
2617 date = dateutil.parsedate(date)
2606 else:
2618 else:
2607 date = None
2619 date = None
2608 prec = parsenodeid(precursor)
2620 prec = parsenodeid(precursor)
2609 parents = None
2621 parents = None
2610 if opts[b'record_parents']:
2622 if opts[b'record_parents']:
2611 if prec not in repo.unfiltered():
2623 if prec not in repo.unfiltered():
2612 raise error.Abort(
2624 raise error.Abort(
2613 b'cannot used --record-parents on '
2625 b'cannot used --record-parents on '
2614 b'unknown changesets'
2626 b'unknown changesets'
2615 )
2627 )
2616 parents = repo.unfiltered()[prec].parents()
2628 parents = repo.unfiltered()[prec].parents()
2617 parents = tuple(p.node() for p in parents)
2629 parents = tuple(p.node() for p in parents)
2618 repo.obsstore.create(
2630 repo.obsstore.create(
2619 tr,
2631 tr,
2620 prec,
2632 prec,
2621 succs,
2633 succs,
2622 opts[b'flags'],
2634 opts[b'flags'],
2623 parents=parents,
2635 parents=parents,
2624 date=date,
2636 date=date,
2625 metadata=metadata,
2637 metadata=metadata,
2626 ui=ui,
2638 ui=ui,
2627 )
2639 )
2628 tr.close()
2640 tr.close()
2629 except ValueError as exc:
2641 except ValueError as exc:
2630 raise error.Abort(
2642 raise error.Abort(
2631 _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
2643 _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
2632 )
2644 )
2633 finally:
2645 finally:
2634 tr.release()
2646 tr.release()
2635 finally:
2647 finally:
2636 l.release()
2648 l.release()
2637 else:
2649 else:
2638 if opts[b'rev']:
2650 if opts[b'rev']:
2639 revs = logcmdutil.revrange(repo, opts[b'rev'])
2651 revs = logcmdutil.revrange(repo, opts[b'rev'])
2640 nodes = [repo[r].node() for r in revs]
2652 nodes = [repo[r].node() for r in revs]
2641 markers = list(
2653 markers = list(
2642 obsutil.getmarkers(
2654 obsutil.getmarkers(
2643 repo, nodes=nodes, exclusive=opts[b'exclusive']
2655 repo, nodes=nodes, exclusive=opts[b'exclusive']
2644 )
2656 )
2645 )
2657 )
2646 markers.sort(key=lambda x: x._data)
2658 markers.sort(key=lambda x: x._data)
2647 else:
2659 else:
2648 markers = obsutil.getmarkers(repo)
2660 markers = obsutil.getmarkers(repo)
2649
2661
2650 markerstoiter = markers
2662 markerstoiter = markers
2651 isrelevant = lambda m: True
2663 isrelevant = lambda m: True
2652 if opts.get(b'rev') and opts.get(b'index'):
2664 if opts.get(b'rev') and opts.get(b'index'):
2653 markerstoiter = obsutil.getmarkers(repo)
2665 markerstoiter = obsutil.getmarkers(repo)
2654 markerset = set(markers)
2666 markerset = set(markers)
2655 isrelevant = lambda m: m in markerset
2667 isrelevant = lambda m: m in markerset
2656
2668
2657 fm = ui.formatter(b'debugobsolete', opts)
2669 fm = ui.formatter(b'debugobsolete', opts)
2658 for i, m in enumerate(markerstoiter):
2670 for i, m in enumerate(markerstoiter):
2659 if not isrelevant(m):
2671 if not isrelevant(m):
2660 # marker can be irrelevant when we're iterating over a set
2672 # marker can be irrelevant when we're iterating over a set
2661 # of markers (markerstoiter) which is bigger than the set
2673 # of markers (markerstoiter) which is bigger than the set
2662 # of markers we want to display (markers)
2674 # of markers we want to display (markers)
2663 # this can happen if both --index and --rev options are
2675 # this can happen if both --index and --rev options are
2664 # provided and thus we need to iterate over all of the markers
2676 # provided and thus we need to iterate over all of the markers
2665 # to get the correct indices, but only display the ones that
2677 # to get the correct indices, but only display the ones that
2666 # are relevant to --rev value
2678 # are relevant to --rev value
2667 continue
2679 continue
2668 fm.startitem()
2680 fm.startitem()
2669 ind = i if opts.get(b'index') else None
2681 ind = i if opts.get(b'index') else None
2670 cmdutil.showmarker(fm, m, index=ind)
2682 cmdutil.showmarker(fm, m, index=ind)
2671 fm.end()
2683 fm.end()
2672
2684
2673
2685
2674 @command(
2686 @command(
2675 b'debugp1copies',
2687 b'debugp1copies',
2676 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2688 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2677 _(b'[-r REV]'),
2689 _(b'[-r REV]'),
2678 )
2690 )
2679 def debugp1copies(ui, repo, **opts):
2691 def debugp1copies(ui, repo, **opts):
2680 """dump copy information compared to p1"""
2692 """dump copy information compared to p1"""
2681
2693
2682 opts = pycompat.byteskwargs(opts)
2694 opts = pycompat.byteskwargs(opts)
2683 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2695 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2684 for dst, src in ctx.p1copies().items():
2696 for dst, src in ctx.p1copies().items():
2685 ui.write(b'%s -> %s\n' % (src, dst))
2697 ui.write(b'%s -> %s\n' % (src, dst))
2686
2698
2687
2699
2688 @command(
2700 @command(
2689 b'debugp2copies',
2701 b'debugp2copies',
2690 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2702 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2691 _(b'[-r REV]'),
2703 _(b'[-r REV]'),
2692 )
2704 )
2693 def debugp1copies(ui, repo, **opts):
2705 def debugp1copies(ui, repo, **opts):
2694 """dump copy information compared to p2"""
2706 """dump copy information compared to p2"""
2695
2707
2696 opts = pycompat.byteskwargs(opts)
2708 opts = pycompat.byteskwargs(opts)
2697 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2709 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2698 for dst, src in ctx.p2copies().items():
2710 for dst, src in ctx.p2copies().items():
2699 ui.write(b'%s -> %s\n' % (src, dst))
2711 ui.write(b'%s -> %s\n' % (src, dst))
2700
2712
2701
2713
2702 @command(
2714 @command(
2703 b'debugpathcomplete',
2715 b'debugpathcomplete',
2704 [
2716 [
2705 (b'f', b'full', None, _(b'complete an entire path')),
2717 (b'f', b'full', None, _(b'complete an entire path')),
2706 (b'n', b'normal', None, _(b'show only normal files')),
2718 (b'n', b'normal', None, _(b'show only normal files')),
2707 (b'a', b'added', None, _(b'show only added files')),
2719 (b'a', b'added', None, _(b'show only added files')),
2708 (b'r', b'removed', None, _(b'show only removed files')),
2720 (b'r', b'removed', None, _(b'show only removed files')),
2709 ],
2721 ],
2710 _(b'FILESPEC...'),
2722 _(b'FILESPEC...'),
2711 )
2723 )
2712 def debugpathcomplete(ui, repo, *specs, **opts):
2724 def debugpathcomplete(ui, repo, *specs, **opts):
2713 """complete part or all of a tracked path
2725 """complete part or all of a tracked path
2714
2726
2715 This command supports shells that offer path name completion. It
2727 This command supports shells that offer path name completion. It
2716 currently completes only files already known to the dirstate.
2728 currently completes only files already known to the dirstate.
2717
2729
2718 Completion extends only to the next path segment unless
2730 Completion extends only to the next path segment unless
2719 --full is specified, in which case entire paths are used."""
2731 --full is specified, in which case entire paths are used."""
2720
2732
2721 def complete(path, acceptable):
2733 def complete(path, acceptable):
2722 dirstate = repo.dirstate
2734 dirstate = repo.dirstate
2723 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2735 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2724 rootdir = repo.root + pycompat.ossep
2736 rootdir = repo.root + pycompat.ossep
2725 if spec != repo.root and not spec.startswith(rootdir):
2737 if spec != repo.root and not spec.startswith(rootdir):
2726 return [], []
2738 return [], []
2727 if os.path.isdir(spec):
2739 if os.path.isdir(spec):
2728 spec += b'/'
2740 spec += b'/'
2729 spec = spec[len(rootdir) :]
2741 spec = spec[len(rootdir) :]
2730 fixpaths = pycompat.ossep != b'/'
2742 fixpaths = pycompat.ossep != b'/'
2731 if fixpaths:
2743 if fixpaths:
2732 spec = spec.replace(pycompat.ossep, b'/')
2744 spec = spec.replace(pycompat.ossep, b'/')
2733 speclen = len(spec)
2745 speclen = len(spec)
2734 fullpaths = opts['full']
2746 fullpaths = opts['full']
2735 files, dirs = set(), set()
2747 files, dirs = set(), set()
2736 adddir, addfile = dirs.add, files.add
2748 adddir, addfile = dirs.add, files.add
2737 for f, st in dirstate.items():
2749 for f, st in dirstate.items():
2738 if f.startswith(spec) and st.state in acceptable:
2750 if f.startswith(spec) and st.state in acceptable:
2739 if fixpaths:
2751 if fixpaths:
2740 f = f.replace(b'/', pycompat.ossep)
2752 f = f.replace(b'/', pycompat.ossep)
2741 if fullpaths:
2753 if fullpaths:
2742 addfile(f)
2754 addfile(f)
2743 continue
2755 continue
2744 s = f.find(pycompat.ossep, speclen)
2756 s = f.find(pycompat.ossep, speclen)
2745 if s >= 0:
2757 if s >= 0:
2746 adddir(f[:s])
2758 adddir(f[:s])
2747 else:
2759 else:
2748 addfile(f)
2760 addfile(f)
2749 return files, dirs
2761 return files, dirs
2750
2762
2751 acceptable = b''
2763 acceptable = b''
2752 if opts['normal']:
2764 if opts['normal']:
2753 acceptable += b'nm'
2765 acceptable += b'nm'
2754 if opts['added']:
2766 if opts['added']:
2755 acceptable += b'a'
2767 acceptable += b'a'
2756 if opts['removed']:
2768 if opts['removed']:
2757 acceptable += b'r'
2769 acceptable += b'r'
2758 cwd = repo.getcwd()
2770 cwd = repo.getcwd()
2759 if not specs:
2771 if not specs:
2760 specs = [b'.']
2772 specs = [b'.']
2761
2773
2762 files, dirs = set(), set()
2774 files, dirs = set(), set()
2763 for spec in specs:
2775 for spec in specs:
2764 f, d = complete(spec, acceptable or b'nmar')
2776 f, d = complete(spec, acceptable or b'nmar')
2765 files.update(f)
2777 files.update(f)
2766 dirs.update(d)
2778 dirs.update(d)
2767 files.update(dirs)
2779 files.update(dirs)
2768 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2780 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2769 ui.write(b'\n')
2781 ui.write(b'\n')
2770
2782
2771
2783
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    # Resolve both endpoints of the comparison up front.
    source_ctx = scmutil.revsingle(repo, rev1)
    dest_ctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(source_ctx, pats, opts)
    copy_map = copies.pathcopies(source_ctx, dest_ctx, matcher)
    # One "source -> destination" line per detected copy, sorted by
    # destination path for stable output.
    for dest, source in sorted(copy_map.items()):
        ui.write(b'%s -> %s\n' % (source, dest))
2785
2797
2786
2798
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Unconditionally enable peer-request logging; the resulting messages
    # are only displayed when --debug is in effect.
    overrides = {
        (b'devel', b'debug.peer-request'): True,
    }

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

    try:
        is_local = peer.local() is not None
        is_pushable = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % (_(b'yes') if is_local else _(b'no')))
        ui.write(
            _(b'pushable: %s\n')
            % (_(b'yes') if is_pushable else _(b'no'))
        )
    finally:
        # Always release the peer connection, even if a query failed.
        peer.close()
2810
2822
2811
2823
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts[b'tool']:
        # --tool has the highest precedence (item 1 above); route it through
        # ui.forcemerge so the regular tool-picking machinery honors it.
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        # With -v/--verbose, surface the tool-selection inputs that are set
        # (HGMERGE and ui.merge), matching the docstring's promise.
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            # Silence _picktool's own chatter unless --debug is set; its
            # warnings are only wanted in debug mode.
            with ui.silent(
                error=True
            ) if not ui.debugflag else util.nullcontextmanager():
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (path, tool))
2896
2908
2897
2909
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    target = hg.peer(ui, {}, repopath)
    try:
        if not keyinfo:
            # Listing mode: dump every key/value pair in the namespace.
            for key, value in sorted(target.listkeys(namespace).items()):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(key), stringutil.escapestr(value))
                )
            return None

        # Update mode: attempt the conditional old -> new transition.
        key, old, new = keyinfo
        with target.commandexecutor() as executor:
            result = executor.callcommand(
                b'pushkey',
                {
                    b'namespace': namespace,
                    b'key': key,
                    b'old': old,
                    b'new': new,
                },
            ).result()

        ui.status(pycompat.bytestr(result) + b'\n')
        # Exit status 0 on success (truthy result), 1 otherwise.
        return not result
    finally:
        target.close()
2933
2945
2934
2946
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """display and compare the parent vectors (pvec) of two revisions

    Prints both vectors, their depths, and the delta / hamming
    distance / relation between them.
    """
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    else:
        # Bug fix: previously `rel` was left unbound when none of the
        # comparisons above held, crashing with a NameError in the final
        # ui.write below. Fall back to an explicit "unknown" marker.
        rel = b"?"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
2961
2973
2962
2974
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    # The whole rebuild happens under the working-copy lock so the dirstate
    # cannot change underneath us.
    with repo.wlock():
        dirstate = repo.dirstate
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            # Restrict the rebuild to files whose dirstate entry disagrees
            # with the parent manifest: present in the manifest but missing
            # from the dirstate, or tracked in the dirstate (and not merely
            # added) while absent from the manifest.
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            manifestonly = manifestfiles - dirstatefiles
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = {f for f in dsonly if not dirstate.get_entry(f).added}
            changedfiles = manifestonly | dsnotadded

        # changedfiles is None (the default) means "rebuild everything".
        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3010
3022
3011
3023
@command(
    b'debugrebuildfncache',
    [
        (
            b'',
            b'only-data',
            False,
            _(b'only look for wrong .d files (much faster)'),
        )
    ],
    b'',
)
def debugrebuildfncache(ui, repo, **opts):
    """rebuild the fncache file"""
    # Normalize keyword-argument keys to bytes before looking them up.
    only_data = pycompat.byteskwargs(opts).get(b"only_data")
    repair.rebuildfncache(ui, repo, only_data)
3028
3040
3029
3041
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, opts)
    for path in ctx.walk(matcher):
        fctx = ctx[path]
        # filelog().renamed() reports the copy source as a
        # (source path, source filenode) pair, or a falsy value when the
        # file was not renamed.
        rename_info = fctx.filelog().renamed(fctx.filenode())
        display_path = repo.pathto(path)
        if not rename_info:
            ui.write(_(b"%s not renamed\n") % display_path)
        else:
            source, node = rename_info
            ui.write(
                _(b"%s renamed from %s:%s\n") % (display_path, source, hex(node))
            )
3049
3061
3050
3062
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    # Sorted for deterministic, diff-friendly output.
    for requirement in sorted(repo.requirements):
        ui.write(requirement + b"\n")
3056
3068
3057
3069
3058 @command(
3070 @command(
3059 b'debugrevlog',
3071 b'debugrevlog',
3060 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
3072 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
3061 _(b'-c|-m|FILE'),
3073 _(b'-c|-m|FILE'),
3062 optionalrepo=True,
3074 optionalrepo=True,
3063 )
3075 )
3064 def debugrevlog(ui, repo, file_=None, **opts):
3076 def debugrevlog(ui, repo, file_=None, **opts):
3065 """show data and statistics about a revlog"""
3077 """show data and statistics about a revlog"""
3066 opts = pycompat.byteskwargs(opts)
3078 opts = pycompat.byteskwargs(opts)
3067 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
3079 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
3068
3080
3069 if opts.get(b"dump"):
3081 if opts.get(b"dump"):
3070 numrevs = len(r)
3082 numrevs = len(r)
3071 ui.write(
3083 ui.write(
3072 (
3084 (
3073 b"# rev p1rev p2rev start end deltastart base p1 p2"
3085 b"# rev p1rev p2rev start end deltastart base p1 p2"
3074 b" rawsize totalsize compression heads chainlen\n"
3086 b" rawsize totalsize compression heads chainlen\n"
3075 )
3087 )
3076 )
3088 )
3077 ts = 0
3089 ts = 0
3078 heads = set()
3090 heads = set()
3079
3091
3080 for rev in pycompat.xrange(numrevs):
3092 for rev in pycompat.xrange(numrevs):
3081 dbase = r.deltaparent(rev)
3093 dbase = r.deltaparent(rev)
3082 if dbase == -1:
3094 if dbase == -1:
3083 dbase = rev
3095 dbase = rev
3084 cbase = r.chainbase(rev)
3096 cbase = r.chainbase(rev)
3085 clen = r.chainlen(rev)
3097 clen = r.chainlen(rev)
3086 p1, p2 = r.parentrevs(rev)
3098 p1, p2 = r.parentrevs(rev)
3087 rs = r.rawsize(rev)
3099 rs = r.rawsize(rev)
3088 ts = ts + rs
3100 ts = ts + rs
3089 heads -= set(r.parentrevs(rev))
3101 heads -= set(r.parentrevs(rev))
3090 heads.add(rev)
3102 heads.add(rev)
3091 try:
3103 try:
3092 compression = ts / r.end(rev)
3104 compression = ts / r.end(rev)
3093 except ZeroDivisionError:
3105 except ZeroDivisionError:
3094 compression = 0
3106 compression = 0
3095 ui.write(
3107 ui.write(
3096 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
3108 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
3097 b"%11d %5d %8d\n"
3109 b"%11d %5d %8d\n"
3098 % (
3110 % (
3099 rev,
3111 rev,
3100 p1,
3112 p1,
3101 p2,
3113 p2,
3102 r.start(rev),
3114 r.start(rev),
3103 r.end(rev),
3115 r.end(rev),
3104 r.start(dbase),
3116 r.start(dbase),
3105 r.start(cbase),
3117 r.start(cbase),
3106 r.start(p1),
3118 r.start(p1),
3107 r.start(p2),
3119 r.start(p2),
3108 rs,
3120 rs,
3109 ts,
3121 ts,
3110 compression,
3122 compression,
3111 len(heads),
3123 len(heads),
3112 clen,
3124 clen,
3113 )
3125 )
3114 )
3126 )
3115 return 0
3127 return 0
3116
3128
3117 format = r._format_version
3129 format = r._format_version
3118 v = r._format_flags
3130 v = r._format_flags
3119 flags = []
3131 flags = []
3120 gdelta = False
3132 gdelta = False
3121 if v & revlog.FLAG_INLINE_DATA:
3133 if v & revlog.FLAG_INLINE_DATA:
3122 flags.append(b'inline')
3134 flags.append(b'inline')
3123 if v & revlog.FLAG_GENERALDELTA:
3135 if v & revlog.FLAG_GENERALDELTA:
3124 gdelta = True
3136 gdelta = True
3125 flags.append(b'generaldelta')
3137 flags.append(b'generaldelta')
3126 if not flags:
3138 if not flags:
3127 flags = [b'(none)']
3139 flags = [b'(none)']
3128
3140
3129 ### tracks merge vs single parent
3141 ### tracks merge vs single parent
3130 nummerges = 0
3142 nummerges = 0
3131
3143
3132 ### tracks ways the "delta" are build
3144 ### tracks ways the "delta" are build
3133 # nodelta
3145 # nodelta
3134 numempty = 0
3146 numempty = 0
3135 numemptytext = 0
3147 numemptytext = 0
3136 numemptydelta = 0
3148 numemptydelta = 0
3137 # full file content
3149 # full file content
3138 numfull = 0
3150 numfull = 0
3139 # intermediate snapshot against a prior snapshot
3151 # intermediate snapshot against a prior snapshot
3140 numsemi = 0
3152 numsemi = 0
3141 # snapshot count per depth
3153 # snapshot count per depth
3142 numsnapdepth = collections.defaultdict(lambda: 0)
3154 numsnapdepth = collections.defaultdict(lambda: 0)
3143 # delta against previous revision
3155 # delta against previous revision
3144 numprev = 0
3156 numprev = 0
3145 # delta against first or second parent (not prev)
3157 # delta against first or second parent (not prev)
3146 nump1 = 0
3158 nump1 = 0
3147 nump2 = 0
3159 nump2 = 0
3148 # delta against neither prev nor parents
3160 # delta against neither prev nor parents
3149 numother = 0
3161 numother = 0
3150 # delta against prev that are also first or second parent
3162 # delta against prev that are also first or second parent
3151 # (details of `numprev`)
3163 # (details of `numprev`)
3152 nump1prev = 0
3164 nump1prev = 0
3153 nump2prev = 0
3165 nump2prev = 0
3154
3166
3155 # data about delta chain of each revs
3167 # data about delta chain of each revs
3156 chainlengths = []
3168 chainlengths = []
3157 chainbases = []
3169 chainbases = []
3158 chainspans = []
3170 chainspans = []
3159
3171
3160 # data about each revision
3172 # data about each revision
3161 datasize = [None, 0, 0]
3173 datasize = [None, 0, 0]
3162 fullsize = [None, 0, 0]
3174 fullsize = [None, 0, 0]
3163 semisize = [None, 0, 0]
3175 semisize = [None, 0, 0]
3164 # snapshot count per depth
3176 # snapshot count per depth
3165 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
3177 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
3166 deltasize = [None, 0, 0]
3178 deltasize = [None, 0, 0]
3167 chunktypecounts = {}
3179 chunktypecounts = {}
3168 chunktypesizes = {}
3180 chunktypesizes = {}
3169
3181
3170 def addsize(size, l):
3182 def addsize(size, l):
3171 if l[0] is None or size < l[0]:
3183 if l[0] is None or size < l[0]:
3172 l[0] = size
3184 l[0] = size
3173 if size > l[1]:
3185 if size > l[1]:
3174 l[1] = size
3186 l[1] = size
3175 l[2] += size
3187 l[2] += size
3176
3188
3177 numrevs = len(r)
3189 numrevs = len(r)
3178 for rev in pycompat.xrange(numrevs):
3190 for rev in pycompat.xrange(numrevs):
3179 p1, p2 = r.parentrevs(rev)
3191 p1, p2 = r.parentrevs(rev)
3180 delta = r.deltaparent(rev)
3192 delta = r.deltaparent(rev)
3181 if format > 0:
3193 if format > 0:
3182 addsize(r.rawsize(rev), datasize)
3194 addsize(r.rawsize(rev), datasize)
3183 if p2 != nullrev:
3195 if p2 != nullrev:
3184 nummerges += 1
3196 nummerges += 1
3185 size = r.length(rev)
3197 size = r.length(rev)
3186 if delta == nullrev:
3198 if delta == nullrev:
3187 chainlengths.append(0)
3199 chainlengths.append(0)
3188 chainbases.append(r.start(rev))
3200 chainbases.append(r.start(rev))
3189 chainspans.append(size)
3201 chainspans.append(size)
3190 if size == 0:
3202 if size == 0:
3191 numempty += 1
3203 numempty += 1
3192 numemptytext += 1
3204 numemptytext += 1
3193 else:
3205 else:
3194 numfull += 1
3206 numfull += 1
3195 numsnapdepth[0] += 1
3207 numsnapdepth[0] += 1
3196 addsize(size, fullsize)
3208 addsize(size, fullsize)
3197 addsize(size, snapsizedepth[0])
3209 addsize(size, snapsizedepth[0])
3198 else:
3210 else:
3199 chainlengths.append(chainlengths[delta] + 1)
3211 chainlengths.append(chainlengths[delta] + 1)
3200 baseaddr = chainbases[delta]
3212 baseaddr = chainbases[delta]
3201 revaddr = r.start(rev)
3213 revaddr = r.start(rev)
3202 chainbases.append(baseaddr)
3214 chainbases.append(baseaddr)
3203 chainspans.append((revaddr - baseaddr) + size)
3215 chainspans.append((revaddr - baseaddr) + size)
3204 if size == 0:
3216 if size == 0:
3205 numempty += 1
3217 numempty += 1
3206 numemptydelta += 1
3218 numemptydelta += 1
3207 elif r.issnapshot(rev):
3219 elif r.issnapshot(rev):
3208 addsize(size, semisize)
3220 addsize(size, semisize)
3209 numsemi += 1
3221 numsemi += 1
3210 depth = r.snapshotdepth(rev)
3222 depth = r.snapshotdepth(rev)
3211 numsnapdepth[depth] += 1
3223 numsnapdepth[depth] += 1
3212 addsize(size, snapsizedepth[depth])
3224 addsize(size, snapsizedepth[depth])
3213 else:
3225 else:
3214 addsize(size, deltasize)
3226 addsize(size, deltasize)
3215 if delta == rev - 1:
3227 if delta == rev - 1:
3216 numprev += 1
3228 numprev += 1
3217 if delta == p1:
3229 if delta == p1:
3218 nump1prev += 1
3230 nump1prev += 1
3219 elif delta == p2:
3231 elif delta == p2:
3220 nump2prev += 1
3232 nump2prev += 1
3221 elif delta == p1:
3233 elif delta == p1:
3222 nump1 += 1
3234 nump1 += 1
3223 elif delta == p2:
3235 elif delta == p2:
3224 nump2 += 1
3236 nump2 += 1
3225 elif delta != nullrev:
3237 elif delta != nullrev:
3226 numother += 1
3238 numother += 1
3227
3239
3228 # Obtain data on the raw chunks in the revlog.
3240 # Obtain data on the raw chunks in the revlog.
3229 if util.safehasattr(r, b'_getsegmentforrevs'):
3241 if util.safehasattr(r, b'_getsegmentforrevs'):
3230 segment = r._getsegmentforrevs(rev, rev)[1]
3242 segment = r._getsegmentforrevs(rev, rev)[1]
3231 else:
3243 else:
3232 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3244 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3233 if segment:
3245 if segment:
3234 chunktype = bytes(segment[0:1])
3246 chunktype = bytes(segment[0:1])
3235 else:
3247 else:
3236 chunktype = b'empty'
3248 chunktype = b'empty'
3237
3249
3238 if chunktype not in chunktypecounts:
3250 if chunktype not in chunktypecounts:
3239 chunktypecounts[chunktype] = 0
3251 chunktypecounts[chunktype] = 0
3240 chunktypesizes[chunktype] = 0
3252 chunktypesizes[chunktype] = 0
3241
3253
3242 chunktypecounts[chunktype] += 1
3254 chunktypecounts[chunktype] += 1
3243 chunktypesizes[chunktype] += size
3255 chunktypesizes[chunktype] += size
3244
3256
3245 # Adjust size min value for empty cases
3257 # Adjust size min value for empty cases
3246 for size in (datasize, fullsize, semisize, deltasize):
3258 for size in (datasize, fullsize, semisize, deltasize):
3247 if size[0] is None:
3259 if size[0] is None:
3248 size[0] = 0
3260 size[0] = 0
3249
3261
3250 numdeltas = numrevs - numfull - numempty - numsemi
3262 numdeltas = numrevs - numfull - numempty - numsemi
3251 numoprev = numprev - nump1prev - nump2prev
3263 numoprev = numprev - nump1prev - nump2prev
3252 totalrawsize = datasize[2]
3264 totalrawsize = datasize[2]
3253 datasize[2] /= numrevs
3265 datasize[2] /= numrevs
3254 fulltotal = fullsize[2]
3266 fulltotal = fullsize[2]
3255 if numfull == 0:
3267 if numfull == 0:
3256 fullsize[2] = 0
3268 fullsize[2] = 0
3257 else:
3269 else:
3258 fullsize[2] /= numfull
3270 fullsize[2] /= numfull
3259 semitotal = semisize[2]
3271 semitotal = semisize[2]
3260 snaptotal = {}
3272 snaptotal = {}
3261 if numsemi > 0:
3273 if numsemi > 0:
3262 semisize[2] /= numsemi
3274 semisize[2] /= numsemi
3263 for depth in snapsizedepth:
3275 for depth in snapsizedepth:
3264 snaptotal[depth] = snapsizedepth[depth][2]
3276 snaptotal[depth] = snapsizedepth[depth][2]
3265 snapsizedepth[depth][2] /= numsnapdepth[depth]
3277 snapsizedepth[depth][2] /= numsnapdepth[depth]
3266
3278
3267 deltatotal = deltasize[2]
3279 deltatotal = deltasize[2]
3268 if numdeltas > 0:
3280 if numdeltas > 0:
3269 deltasize[2] /= numdeltas
3281 deltasize[2] /= numdeltas
3270 totalsize = fulltotal + semitotal + deltatotal
3282 totalsize = fulltotal + semitotal + deltatotal
3271 avgchainlen = sum(chainlengths) / numrevs
3283 avgchainlen = sum(chainlengths) / numrevs
3272 maxchainlen = max(chainlengths)
3284 maxchainlen = max(chainlengths)
3273 maxchainspan = max(chainspans)
3285 maxchainspan = max(chainspans)
3274 compratio = 1
3286 compratio = 1
3275 if totalsize:
3287 if totalsize:
3276 compratio = totalrawsize / totalsize
3288 compratio = totalrawsize / totalsize
3277
3289
3278 basedfmtstr = b'%%%dd\n'
3290 basedfmtstr = b'%%%dd\n'
3279 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3291 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3280
3292
3281 def dfmtstr(max):
3293 def dfmtstr(max):
3282 return basedfmtstr % len(str(max))
3294 return basedfmtstr % len(str(max))
3283
3295
3284 def pcfmtstr(max, padding=0):
3296 def pcfmtstr(max, padding=0):
3285 return basepcfmtstr % (len(str(max)), b' ' * padding)
3297 return basepcfmtstr % (len(str(max)), b' ' * padding)
3286
3298
3287 def pcfmt(value, total):
3299 def pcfmt(value, total):
3288 if total:
3300 if total:
3289 return (value, 100 * float(value) / total)
3301 return (value, 100 * float(value) / total)
3290 else:
3302 else:
3291 return value, 100.0
3303 return value, 100.0
3292
3304
3293 ui.writenoi18n(b'format : %d\n' % format)
3305 ui.writenoi18n(b'format : %d\n' % format)
3294 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3306 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3295
3307
3296 ui.write(b'\n')
3308 ui.write(b'\n')
3297 fmt = pcfmtstr(totalsize)
3309 fmt = pcfmtstr(totalsize)
3298 fmt2 = dfmtstr(totalsize)
3310 fmt2 = dfmtstr(totalsize)
3299 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3311 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3300 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3312 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3301 ui.writenoi18n(
3313 ui.writenoi18n(
3302 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3314 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3303 )
3315 )
3304 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3316 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3305 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3317 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3306 ui.writenoi18n(
3318 ui.writenoi18n(
3307 b' text : '
3319 b' text : '
3308 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3320 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3309 )
3321 )
3310 ui.writenoi18n(
3322 ui.writenoi18n(
3311 b' delta : '
3323 b' delta : '
3312 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3324 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3313 )
3325 )
3314 ui.writenoi18n(
3326 ui.writenoi18n(
3315 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3327 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3316 )
3328 )
3317 for depth in sorted(numsnapdepth):
3329 for depth in sorted(numsnapdepth):
3318 ui.write(
3330 ui.write(
3319 (b' lvl-%-3d : ' % depth)
3331 (b' lvl-%-3d : ' % depth)
3320 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3332 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3321 )
3333 )
3322 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3334 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3323 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3335 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3324 ui.writenoi18n(
3336 ui.writenoi18n(
3325 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3337 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3326 )
3338 )
3327 for depth in sorted(numsnapdepth):
3339 for depth in sorted(numsnapdepth):
3328 ui.write(
3340 ui.write(
3329 (b' lvl-%-3d : ' % depth)
3341 (b' lvl-%-3d : ' % depth)
3330 + fmt % pcfmt(snaptotal[depth], totalsize)
3342 + fmt % pcfmt(snaptotal[depth], totalsize)
3331 )
3343 )
3332 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3344 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3333
3345
3334 def fmtchunktype(chunktype):
3346 def fmtchunktype(chunktype):
3335 if chunktype == b'empty':
3347 if chunktype == b'empty':
3336 return b' %s : ' % chunktype
3348 return b' %s : ' % chunktype
3337 elif chunktype in pycompat.bytestr(string.ascii_letters):
3349 elif chunktype in pycompat.bytestr(string.ascii_letters):
3338 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3350 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3339 else:
3351 else:
3340 return b' 0x%s : ' % hex(chunktype)
3352 return b' 0x%s : ' % hex(chunktype)
3341
3353
3342 ui.write(b'\n')
3354 ui.write(b'\n')
3343 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3355 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3344 for chunktype in sorted(chunktypecounts):
3356 for chunktype in sorted(chunktypecounts):
3345 ui.write(fmtchunktype(chunktype))
3357 ui.write(fmtchunktype(chunktype))
3346 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3358 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3347 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3359 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3348 for chunktype in sorted(chunktypecounts):
3360 for chunktype in sorted(chunktypecounts):
3349 ui.write(fmtchunktype(chunktype))
3361 ui.write(fmtchunktype(chunktype))
3350 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3362 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3351
3363
3352 ui.write(b'\n')
3364 ui.write(b'\n')
3353 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3365 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3354 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3366 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3355 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3367 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3356 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3368 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3357 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3369 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3358
3370
3359 if format > 0:
3371 if format > 0:
3360 ui.write(b'\n')
3372 ui.write(b'\n')
3361 ui.writenoi18n(
3373 ui.writenoi18n(
3362 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3374 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3363 % tuple(datasize)
3375 % tuple(datasize)
3364 )
3376 )
3365 ui.writenoi18n(
3377 ui.writenoi18n(
3366 b'full revision size (min/max/avg) : %d / %d / %d\n'
3378 b'full revision size (min/max/avg) : %d / %d / %d\n'
3367 % tuple(fullsize)
3379 % tuple(fullsize)
3368 )
3380 )
3369 ui.writenoi18n(
3381 ui.writenoi18n(
3370 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3382 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3371 % tuple(semisize)
3383 % tuple(semisize)
3372 )
3384 )
3373 for depth in sorted(snapsizedepth):
3385 for depth in sorted(snapsizedepth):
3374 if depth == 0:
3386 if depth == 0:
3375 continue
3387 continue
3376 ui.writenoi18n(
3388 ui.writenoi18n(
3377 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3389 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3378 % ((depth,) + tuple(snapsizedepth[depth]))
3390 % ((depth,) + tuple(snapsizedepth[depth]))
3379 )
3391 )
3380 ui.writenoi18n(
3392 ui.writenoi18n(
3381 b'delta size (min/max/avg) : %d / %d / %d\n'
3393 b'delta size (min/max/avg) : %d / %d / %d\n'
3382 % tuple(deltasize)
3394 % tuple(deltasize)
3383 )
3395 )
3384
3396
3385 if numdeltas > 0:
3397 if numdeltas > 0:
3386 ui.write(b'\n')
3398 ui.write(b'\n')
3387 fmt = pcfmtstr(numdeltas)
3399 fmt = pcfmtstr(numdeltas)
3388 fmt2 = pcfmtstr(numdeltas, 4)
3400 fmt2 = pcfmtstr(numdeltas, 4)
3389 ui.writenoi18n(
3401 ui.writenoi18n(
3390 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3402 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3391 )
3403 )
3392 if numprev > 0:
3404 if numprev > 0:
3393 ui.writenoi18n(
3405 ui.writenoi18n(
3394 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3406 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3395 )
3407 )
3396 ui.writenoi18n(
3408 ui.writenoi18n(
3397 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3409 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3398 )
3410 )
3399 ui.writenoi18n(
3411 ui.writenoi18n(
3400 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3412 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3401 )
3413 )
3402 if gdelta:
3414 if gdelta:
3403 ui.writenoi18n(
3415 ui.writenoi18n(
3404 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3416 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3405 )
3417 )
3406 ui.writenoi18n(
3418 ui.writenoi18n(
3407 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3419 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3408 )
3420 )
3409 ui.writenoi18n(
3421 ui.writenoi18n(
3410 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3422 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3411 )
3423 )
3412
3424
3413
3425
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    rl = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # --debug prints full hashes, otherwise the short form is used.
    shortfn = hex if ui.debugflag else short

    # The revlog may be empty, so start from a sane default column width
    # and take the real one from the first revision when there is one.
    idlen = 12
    for rev in rl:
        idlen = len(shortfn(rl.node(rev)))
        break

    # Emit a column header matching the requested format and verbosity.
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b" rev offset length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b" rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b" rev flag offset length size link p1"
                    b" p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b" rev flag size link p1 p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    for rev in rl:
        node = rl.node(rev)
        if format == 0:
            # parents() may fail on damaged revlogs; fall back to nulls so
            # the remaining entries can still be dumped.
            try:
                parents = rl.parents(node)
            except Exception:
                parents = [repo.nullid, repo.nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        rev,
                        rl.start(rev),
                        rl.length(rev),
                        rl.linkrev(rev),
                        shortfn(node),
                        shortfn(parents[0]),
                        shortfn(parents[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        rev,
                        rl.linkrev(rev),
                        shortfn(node),
                        shortfn(parents[0]),
                        shortfn(parents[1]),
                    )
                )
        elif format == 1:
            prevs = rl.parentrevs(rev)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        rev,
                        rl.flags(rev),
                        rl.start(rev),
                        rl.length(rev),
                        rl.rawsize(rev),
                        rl.linkrev(rev),
                        prevs[0],
                        prevs[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        rev,
                        rl.flags(rev),
                        rl.rawsize(rev),
                        rl.linkrev(rev),
                        prevs[0],
                        prevs[1],
                        shortfn(node),
                    )
                )
3527
3539
3528
3540
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # The revset processing pipeline: each stage transforms the tree
    # produced by the previous one.  The order mirrors what normal revset
    # evaluation does.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        # drop the final 'optimized' stage
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    showalways = set()  # stages whose tree is always printed
    showchanged = set()  # stages printed only if they changed the tree
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts[b'optimize']:
        showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # Run the pipeline, remembering each intermediate tree so that
    # --verify-optimized can re-evaluate specific stages afterwards.
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # Evaluate both the analyzed and the optimized tree and diff the
        # resulting revision sequences; any difference indicates an
        # optimizer bug (exit status 1).
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # Print a unified-diff-style comparison of the two revision lists.
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    # Normal mode: evaluate the final tree and print the resulting revs.
    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3660
3672
3661
3673
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))
    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    logfh = None
    if opts[b'logiofd']:
        # Line buffering would be ideal, but binary streams reject it (and
        # Python 3.8+ warns about it), so use an unbuffered stream; this is
        # not performance-critical code.
        try:
            logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
        except OSError as err:
            if err.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 0)

    server = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    server.serve_forever()
3710
3722
3711
3723
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, without touching
    anything else. This is useful for writing repository conversion tools, but
    should be used with extreme care. For example, neither the working
    directory nor the dirstate is updated, so file status may be incorrect
    after running this command. Only use it if you are one of the few people
    that deeply understand both conversion tools and file level histories. If
    you are reading this help, you are not one of those people (most of them
    sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    # rev2 defaults to the null revision, making single-parent setups easy.
    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    # Only the dirstate parent pointers are touched; the working directory
    # contents and file states are deliberately left alone.
    with repo.wlock():
        repo.setparents(node1, node2)
3739
3751
3740
3752
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the positional argument is the revision, not a file.
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    store = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    # Reach through to the underlying revlog when the storage wraps one.
    store = getattr(store, '_revlog', store)
    try:
        sidedata = store.sidedata(store.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        entries = sorted(sidedata.items())
        ui.writenoi18n(b'%d sidedata entries\n' % len(entries))
        for key, value in entries:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3767
3779
3768
3780
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = urlutil.get_unique_pull_path(
        b'debugssl', repo, ui, source
    )
    url = urlutil.url(source)

    # Resolve the target address, filling in the scheme's default port.
    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # ssl.wrap_socket() was deprecated in Python 3.7 and removed in 3.12;
    # use an explicit SSLContext instead.  Verification is intentionally
    # disabled: we only need the peer's raw certificate to (re)build its
    # chain, not a trusted connection.
    sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    sslcontext.check_hostname = False  # must be cleared before verify_mode
    sslcontext.verify_mode = ssl.CERT_NONE
    s = sslcontext.wrap_socket(socket.socket())

    try:
        s.connect(addr)
        # True -> DER-encoded certificate bytes, as win32 expects.
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        # First probe without building; only trigger the (slow) Windows
        # Update chain building when something is actually missing.
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3840
3852
3841
3853
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect every *.hg bundle under .hg/strip-backup, newest first.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    # Fake the options the incoming-style machinery below expects.
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Show up to `limit` changesets from `chlist` of peer `other`,
        # honoring --newest-first and --no-merges.
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        # Nothing to recover if the node is already present locally.
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        source, branches = urlutil.get_unique_pull_path(
            b'debugbackupbundle',
            repo,
            ui,
            source,
            default_branches=opts.get(b'branch'),
        )
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            # Bundle references a parent revision we don't have; skip it.
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # Quiet down the noisy bundle-repo setup.
        with ui.silent():
            try:
                other, chlist, cleanupfn = bundlerepo.getremotechanges(
                    ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
                )
            except error.LookupError:
                continue

        try:
            if not chlist:
                continue
            if recovernode:
                # Apply the first bundle that contains the requested node,
                # under the store lock and a single transaction.
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        # Stop after the first bundle containing the node.
                        break
            else:
                # Listing mode: header with the bundle's mtime, then the
                # changesets it contains.
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                displayer = logcmdutil.changesetdisplayer(
                    ui, other, opts, False
                )
                display(other, chlist, displayer)
                displayer.close()
        finally:
            cleanupfn()
3982
3994
3983
3995
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Dump the subrepository state (path, source, pinned revision) recorded
    # in the requested changeset, sorted by subrepo path.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
3995
4007
3996
4008
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    import code

    # Expose ui and repo (which may be None) to the interactive session.
    code.interact(local={'ui': ui, 'repo': repo})
4012
4024
4013
4025
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # Shared cache so successorssets() computations are reused between
    # the revisions handled in this single invocation.
    cache = {}
    ctx2str = bytes
    node2str = short
    for rev in logcmdutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % ctx2str(ctx))
        succ_sets = obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        )
        for succsset in succ_sets:
            # One indented line per successors set; empty sets produce a
            # bare newline.
            if succsset:
                ui.write(b'    ')
                ui.write(b' '.join(node2str(node) for node in succsset))
            ui.write(b'\n')
4068
4080
4069
4081
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    fnodescache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    hgtagslog = repo.file(b'.hgtags')
    for rev in repo:
        node = repo[rev].node()
        fnode = fnodescache.getfnode(node, computemissing=False)
        if fnode:
            # Cache has a filenode; flag it if .hgtags doesn't know it.
            display = hex(fnode)
            if not hgtagslog.hasnode(fnode):
                display += b' (unknown node)'
        elif fnode is None:
            display = b'missing'
        else:
            display = b'invalid'

        ui.write(b'%d %s %s\n' % (rev, hex(node), display))
4088
4100
4089
4101
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # Log-template mode needs a repository to resolve the revisions.
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = logcmdutil.revrange(repo, opts['rev'])

    # Turn -D KEY=VALUE definitions into template properties; 'ui' is
    # reserved and an empty key is rejected.
    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        # Print the raw parse tree, and the alias-expanded tree only if
        # alias expansion actually changed it.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic template: render once with the supplied properties.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested revision.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
4153
4165
4154
4166
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    response = ui.getpass(prompt)
    # getpass() can yield None; print a placeholder instead of crashing
    # the %s formatting below.
    if response is None:
        response = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % response)
4169
4181
4170
4182
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo whatever the user typed at the plain (non-password) prompt.
    answer = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % answer)
4183
4195
4184
4196
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Hold both the working-copy and store locks while warming caches.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(caches=repository.CACHES_ALL)
4190
4202
4191
4203
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # All the actual work lives in the upgrade module; deduplicate the
    # repeatable -o/--optimize values before delegating.
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
    )
4241
4253
4242
4254
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    matched = list(repo[None].walk(m))
    if not matched:
        return
    # Convert OS path separators to '/' only when ui.slash requests it.
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        normalize = util.normpath
    else:
        normalize = lambda fn: fn
    # Size the columns to the longest repo-relative and cwd-relative paths.
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(path) for path in matched),
        max(len(repo.pathto(path)) for path in matched),
    )
    for path in matched:
        line = fmt % (
            path,
            normalize(repo.pathto(path)),
            b'exact' if m.exact(path) else b'',
        )
        ui.write(b"%s\n" % line.rstrip())
4269
4281
4270
4282
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        # When divergent nodes are present, render them as space-separated
        # "<hex> (<phase>)" pairs followed by a trailing space.
        divergent = entry.get(b'divergentnodes')
        if divergent:
            pieces = [b'%s (%s)' % (c.hex(), c.phasestr()) for c in divergent]
            dnodes = b' '.join(pieces) + b' '
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4288
4300
4289
4301
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    # Exercise wire-protocol argument passing against the peer at repopath.
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    try:
        # Drop the generic remote options; only the test options (three,
        # four, five) and positional vals are forwarded to the command.
        for opt in cmdutil.remoteopts:
            del opts[opt[1]]
        args = {}
        for k, v in opts.items():
            # Only forward options that were actually set.
            if v:
                args[k] = v
        args = pycompat.strkwargs(args)
        # run twice to check that we don't mess up the stream for the next command
        res1 = repo.debugwireargs(*vals, **args)
        res2 = repo.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % res1)
        # Differing results mean the second call saw a corrupted stream.
        if res1 != res2:
            ui.warn(b"%s\n" % res2)
    finally:
        # Always release the peer connection.
        repo.close()
4320
4332
4321
4333
4322 def _parsewirelangblocks(fh):
4334 def _parsewirelangblocks(fh):
4323 activeaction = None
4335 activeaction = None
4324 blocklines = []
4336 blocklines = []
4325 lastindent = 0
4337 lastindent = 0
4326
4338
4327 for line in fh:
4339 for line in fh:
4328 line = line.rstrip()
4340 line = line.rstrip()
4329 if not line:
4341 if not line:
4330 continue
4342 continue
4331
4343
4332 if line.startswith(b'#'):
4344 if line.startswith(b'#'):
4333 continue
4345 continue
4334
4346
4335 if not line.startswith(b' '):
4347 if not line.startswith(b' '):
4336 # New block. Flush previous one.
4348 # New block. Flush previous one.
4337 if activeaction:
4349 if activeaction:
4338 yield activeaction, blocklines
4350 yield activeaction, blocklines
4339
4351
4340 activeaction = line
4352 activeaction = line
4341 blocklines = []
4353 blocklines = []
4342 lastindent = 0
4354 lastindent = 0
4343 continue
4355 continue
4344
4356
4345 # Else we start with an indent.
4357 # Else we start with an indent.
4346
4358
4347 if not activeaction:
4359 if not activeaction:
4348 raise error.Abort(_(b'indented line outside of block'))
4360 raise error.Abort(_(b'indented line outside of block'))
4349
4361
4350 indent = len(line) - len(line.lstrip())
4362 indent = len(line) - len(line.lstrip())
4351
4363
4352 # If this line is indented more than the last line, concatenate it.
4364 # If this line is indented more than the last line, concatenate it.
4353 if indent > lastindent and blocklines:
4365 if indent > lastindent and blocklines:
4354 blocklines[-1] += line.lstrip()
4366 blocklines[-1] += line.lstrip()
4355 else:
4367 else:
4356 blocklines.append(line)
4368 blocklines.append(line)
4357 lastindent = indent
4369 lastindent = indent
4358
4370
4359 # Flush last block.
4371 # Flush last block.
4360 if activeaction:
4372 if activeaction:
4361 yield activeaction, blocklines
4373 yield activeaction, blocklines
4362
4374
4363
4375
@command(
    b'debugwireproto',
    [
        (b'', b'localssh', False, _(b'start an SSH server for this repo')),
        (b'', b'peer', b'', _(b'construct a specific version of the peer')),
        (
            b'',
            b'noreadstderr',
            False,
            _(b'do not read from stderr of the remote'),
        ),
        (
            b'',
            b'nologhandshake',
            False,
            _(b'do not log I/O related to the peer handshake'),
        ),
    ]
    + cmdutil.remoteopts,
    _(b'[PATH]'),
    optionalrepo=True,
)
def debugwireproto(ui, repo, path=None, **opts):
    """send wire protocol commands to a server

    This command can be used to issue wire protocol commands to remote
    peers and to debug the raw data being exchanged.

    ``--localssh`` will start an SSH server against the current repository
    and connect to that. By default, the connection will perform a handshake
    and establish an appropriate peer instance.

    ``--peer`` can be used to bypass the handshake protocol and construct a
    peer instance using the specified class type. Valid values are ``raw``,
    ``ssh1``. ``raw`` instances only allow sending raw data payloads and
    don't support higher-level command actions.

    ``--noreadstderr`` can be used to disable automatic reading from stderr
    of the peer (for SSH connections only). Disabling automatic reading of
    stderr is useful for making output more deterministic.

    Commands are issued via a mini language which is specified via stdin.
    The language consists of individual actions to perform. An action is
    defined by a block. A block is defined as a line with no leading
    space followed by 0 or more lines with leading space. Blocks are
    effectively a high-level command with additional metadata.

    Lines beginning with ``#`` are ignored.

    The following sections denote available actions.

    raw
    ---

    Send raw data to the server.

    The block payload contains the raw data to send as one atomic send
    operation. The data may not actually be delivered in a single system
    call: it depends on the abilities of the transport being used.

    Each line in the block is de-indented and concatenated. Then, that
    value is evaluated as a Python b'' literal. This allows the use of
    backslash escaping, etc.

    raw+
    ----

    Behaves like ``raw`` except flushes output afterwards.

    command <X>
    -----------

    Send a request to run a named command, whose name follows the ``command``
    string.

    Arguments to the command are defined as lines in this block. The format of
    each line is ``<key> <value>``. e.g.::

        command listkeys
        namespace bookmarks

    If the value begins with ``eval:``, it will be interpreted as a Python
    literal expression. Otherwise values are interpreted as Python b'' literals.
    This allows sending complex types and encoding special byte sequences via
    backslash escaping.

    The following arguments have special meaning:

    ``PUSHFILE``
        When defined, the *push* mechanism of the peer will be used instead
        of the static request-response mechanism and the content of the
        file specified in the value of this argument will be sent as the
        command payload.

        This can be used to submit a local bundle file to the remote.

    batchbegin
    ----------

    Instruct the peer to begin a batched send.

    All ``command`` blocks are queued for execution until the next
    ``batchsubmit`` block.

    batchsubmit
    -----------

    Submit previously queued ``command`` blocks as a batch request.

    This action MUST be paired with a ``batchbegin`` action.

    httprequest <method> <path>
    ---------------------------

    (HTTP peer only)

    Send an HTTP request to the peer.

    The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.

    Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
    headers to add to the request. e.g. ``Accept: foo``.

    The following arguments are special:

    ``BODYFILE``
        The content of the file defined as the value to this argument will be
        transferred verbatim as the HTTP request body.

    ``frame <type> <flags> <payload>``
        Send a unified protocol frame as part of the request body.

        All frames will be collected and sent as the body to the HTTP
        request.

    close
    -----

    Close the connection to the server.

    flush
    -----

    Flush data written to the server.

    readavailable
    -------------

    Close the write end of the connection and read all available data from
    the server.

    If the connection to the server encompasses multiple pipes, we poll both
    pipes and read available data.

    readline
    --------

    Read a line of output from the server. If there are multiple output
    pipes, reads only the main pipe.

    ereadline
    ---------

    Like ``readline``, but read from the stderr pipe, if available.

    read <X>
    --------

    ``read()`` N bytes from the server's main output pipe.

    eread <X>
    ---------

    ``read()`` N bytes from the server's stderr pipe, if available.

    Specifying Unified Frame-Based Protocol Frames
    ----------------------------------------------

    It is possible to emit a *Unified Frame-Based Protocol* by using special
    syntax.

    A frame is composed as a type, flags, and payload. These can be parsed
    from a string of the form:

        <request-id> <stream-id> <stream-flags> <type> <flags> <payload>

    ``request-id`` and ``stream-id`` are integers defining the request and
    stream identifiers.

    ``type`` can be an integer value for the frame type or the string name
    of the type. The strings are defined in ``wireprotoframing.py``. e.g.
    ``command-name``.

    ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
    components. Each component (and there can be just one) can be an integer
    or a flag name for stream flags or frame flags, respectively. Values are
    resolved to integers and then bitwise OR'd together.

    ``payload`` represents the raw frame payload. If it begins with
    ``cbor:``, the following string is evaluated as Python code and the
    resulting object is fed into a CBOR encoder. Otherwise it is interpreted
    as a Python byte string literal.
    """
    opts = pycompat.byteskwargs(opts)

    if opts[b'localssh'] and not repo:
        raise error.Abort(_(b'--localssh requires a repository'))

    if opts[b'peer'] and opts[b'peer'] not in (
        b'raw',
        b'ssh1',
    ):
        raise error.Abort(
            _(b'invalid value for --peer'),
            hint=_(b'valid values are "raw" and "ssh1"'),
        )

    if path and opts[b'localssh']:
        raise error.Abort(_(b'cannot specify --localssh with an explicit path'))

    if ui.interactive():
        ui.write(_(b'(waiting for commands on stdin)\n'))

    blocks = list(_parsewirelangblocks(ui.fin))

    proc = None
    stdin = None
    stdout = None
    stderr = None
    opener = None

    if opts[b'localssh']:
        # We start the SSH server in its own process so there is process
        # separation. This prevents a whole class of potential bugs around
        # shared state from interfering with server operation.
        args = procutil.hgcmd() + [
            b'-R',
            repo.root,
            b'debugserve',
            b'--sshstdio',
        ]
        proc = subprocess.Popen(
            pycompat.rapply(procutil.tonativestr, args),
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            bufsize=0,
        )

        stdin = proc.stdin
        stdout = proc.stdout
        stderr = proc.stderr

        # We turn the pipes into observers so we can log I/O.
        if ui.verbose or opts[b'peer'] == b'raw':
            stdin = util.makeloggingfileobject(
                ui, proc.stdin, b'i', logdata=True
            )
            stdout = util.makeloggingfileobject(
                ui, proc.stdout, b'o', logdata=True
            )
            stderr = util.makeloggingfileobject(
                ui, proc.stderr, b'e', logdata=True
            )

        # --localssh also implies the peer connection settings.

        url = b'ssh://localserver'
        autoreadstderr = not opts[b'noreadstderr']

        if opts[b'peer'] == b'ssh1':
            ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
            peer = sshpeer.sshv1peer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                None,
                autoreadstderr=autoreadstderr,
            )
        elif opts[b'peer'] == b'raw':
            ui.write(_(b'using raw connection to peer\n'))
            peer = None
        else:
            ui.write(_(b'creating ssh peer from handshake results\n'))
            peer = sshpeer.makepeer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                autoreadstderr=autoreadstderr,
            )

    elif path:
        # We bypass hg.peer() so we can proxy the sockets.
        # TODO consider not doing this because we skip
        # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
        u = urlutil.url(path)
        if u.scheme != b'http':
            raise error.Abort(_(b'only http:// paths are currently supported'))

        url, authinfo = u.authinfo()
        openerargs = {
            'useragent': b'Mercurial debugwireproto',
        }

        # Turn pipes/sockets into observers so we can log I/O.
        if ui.verbose:
            openerargs.update(
                {
                    'loggingfh': ui,
                    'loggingname': b's',
                    'loggingopts': {
                        'logdata': True,
                        'logdataapis': False,
                    },
                }
            )

        if ui.debugflag:
            openerargs['loggingopts']['logdataapis'] = True

        # Don't send default headers when in raw mode. This allows us to
        # bypass most of the behavior of our URL handling code so we can
        # have near complete control over what's sent on the wire.
        if opts[b'peer'] == b'raw':
            openerargs['sendaccept'] = False

        opener = urlmod.opener(ui, authinfo, **openerargs)

        if opts[b'peer'] == b'raw':
            ui.write(_(b'using raw connection to peer\n'))
            peer = None
        elif opts[b'peer']:
            raise error.Abort(
                _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
            )
        else:
            peer = httppeer.makepeer(ui, path, opener=opener)

        # We /could/ populate stdin/stdout with sock.makefile()...
    else:
        raise error.Abort(_(b'unsupported connection configuration'))

    batchedcommands = None

    # Now perform actions based on the parsed wire language instructions.
    for action, lines in blocks:
        if action in (b'raw', b'raw+'):
            if not stdin:
                raise error.Abort(_(b'cannot call raw/raw+ on this peer'))

            # Concatenate the data together.
            data = b''.join(l.lstrip() for l in lines)
            data = stringutil.unescapestr(data)
            stdin.write(data)

            if action == b'raw+':
                stdin.flush()
        elif action == b'flush':
            if not stdin:
                raise error.Abort(_(b'cannot call flush on this peer'))
            stdin.flush()
        elif action.startswith(b'command'):
            if not peer:
                raise error.Abort(
                    _(
                        b'cannot send commands unless peer instance '
                        b'is available'
                    )
                )

            command = action.split(b' ', 1)[1]

            args = {}
            for line in lines:
                # We need to allow empty values.
                fields = line.lstrip().split(b' ', 1)
                if len(fields) == 1:
                    key = fields[0]
                    value = b''
                else:
                    key, value = fields

                if value.startswith(b'eval:'):
                    value = stringutil.evalpythonliteral(value[5:])
                else:
                    value = stringutil.unescapestr(value)

                args[key] = value

            if batchedcommands is not None:
                batchedcommands.append((command, args))
                continue

            ui.status(_(b'sending %s command\n') % command)

            if b'PUSHFILE' in args:
                with open(args[b'PUSHFILE'], 'rb') as fh:
                    del args[b'PUSHFILE']
                    res, output = peer._callpush(
                        command, fh, **pycompat.strkwargs(args)
                    )
                ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
                ui.status(
                    _(b'remote output: %s\n') % stringutil.escapestr(output)
                )
            else:
                with peer.commandexecutor() as e:
                    res = e.callcommand(command, args).result()

                ui.status(
                    _(b'response: %s\n')
                    % stringutil.pprint(res, bprefix=True, indent=2)
                )

        elif action == b'batchbegin':
            if batchedcommands is not None:
                raise error.Abort(_(b'nested batchbegin not allowed'))

            batchedcommands = []
        elif action == b'batchsubmit':
            # There is a batching API we could go through. But it would be
            # difficult to normalize requests into function calls. It is easier
            # to bypass this layer and normalize to commands + args.
            ui.status(
                _(b'sending batch with %d sub-commands\n')
                % len(batchedcommands)
            )
            assert peer is not None
            for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
                ui.status(
                    _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
                )

            batchedcommands = None

        elif action.startswith(b'httprequest '):
            if not opener:
                raise error.Abort(
                    _(b'cannot use httprequest without an HTTP peer')
                )

            request = action.split(b' ', 2)
            if len(request) != 3:
                raise error.Abort(
                    _(
                        b'invalid httprequest: expected format is '
                        b'"httprequest <method> <path>"'
                    )
                )

            method, httppath = request[1:]
            headers = {}
            body = None
            frames = []
            for line in lines:
                line = line.lstrip()
                m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
                if m:
                    # Headers need to use native strings.
                    key = pycompat.strurl(m.group(1))
                    value = pycompat.strurl(m.group(2))
                    headers[key] = value
                    continue

                if line.startswith(b'BODYFILE '):
                    # split() returns [b'BODYFILE', <path>]; open the path
                    # component, not the whole list (which raised TypeError).
                    with open(line.split(b' ', 1)[1], b'rb') as fh:
                        body = fh.read()
                elif line.startswith(b'frame '):
                    frame = wireprotoframing.makeframefromhumanstring(
                        line[len(b'frame ') :]
                    )

                    frames.append(frame)
                else:
                    raise error.Abort(
                        _(b'unknown argument to httprequest: %s') % line
                    )

            url = path + httppath

            if frames:
                body = b''.join(bytes(f) for f in frames)

            req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)

            # urllib.Request insists on using has_data() as a proxy for
            # determining the request method. Override that to use our
            # explicitly requested method.
            req.get_method = lambda: pycompat.sysstr(method)

            try:
                res = opener.open(req)
                body = res.read()
            except util.urlerr.urlerror as e:
                # read() method must be called, but only exists in Python 2
                getattr(e, 'read', lambda: None)()
                continue

            ct = res.headers.get('Content-Type')
            if ct == 'application/mercurial-cbor':
                ui.write(
                    _(b'cbor> %s\n')
                    % stringutil.pprint(
                        cborutil.decodeall(body), bprefix=True, indent=2
                    )
                )

        elif action == b'close':
            assert peer is not None
            peer.close()
        elif action == b'readavailable':
            if not stdout or not stderr:
                raise error.Abort(
                    _(b'readavailable not available on this peer')
                )

            stdin.close()
            stdout.read()
            stderr.read()

        elif action == b'readline':
            if not stdout:
                raise error.Abort(_(b'readline not available on this peer'))
            stdout.readline()
        elif action == b'ereadline':
            if not stderr:
                raise error.Abort(_(b'ereadline not available on this peer'))
            stderr.readline()
        elif action.startswith(b'read '):
            count = int(action.split(b' ', 1)[1])
            if not stdout:
                raise error.Abort(_(b'read not available on this peer'))
            stdout.read(count)
        elif action.startswith(b'eread '):
            count = int(action.split(b' ', 1)[1])
            if not stderr:
                raise error.Abort(_(b'eread not available on this peer'))
            stderr.read(count)
        else:
            raise error.Abort(_(b'unknown action: %s') % action)

    if batchedcommands is not None:
        raise error.Abort(_(b'unclosed "batchbegin" request'))

    if peer:
        peer.close()

    if proc:
        proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now