##// END OF EJS Templates
debug-discovery: apply spelling fixes from Raphaël
marmoute -
r50686:f69bffd0 6.2.1 stable
parent child Browse files
Show More
@@ -1,5058 +1,5058 b''
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import binascii
9 import binascii
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import contextlib
12 import contextlib
13 import difflib
13 import difflib
14 import errno
14 import errno
15 import glob
15 import glob
16 import operator
16 import operator
17 import os
17 import os
18 import platform
18 import platform
19 import random
19 import random
20 import re
20 import re
21 import socket
21 import socket
22 import ssl
22 import ssl
23 import stat
23 import stat
24 import string
24 import string
25 import subprocess
25 import subprocess
26 import sys
26 import sys
27 import time
27 import time
28
28
29 from .i18n import _
29 from .i18n import _
30 from .node import (
30 from .node import (
31 bin,
31 bin,
32 hex,
32 hex,
33 nullrev,
33 nullrev,
34 short,
34 short,
35 )
35 )
36 from .pycompat import (
36 from .pycompat import (
37 getattr,
37 getattr,
38 open,
38 open,
39 )
39 )
40 from . import (
40 from . import (
41 bundle2,
41 bundle2,
42 bundlerepo,
42 bundlerepo,
43 changegroup,
43 changegroup,
44 cmdutil,
44 cmdutil,
45 color,
45 color,
46 context,
46 context,
47 copies,
47 copies,
48 dagparser,
48 dagparser,
49 dirstateutils,
49 dirstateutils,
50 encoding,
50 encoding,
51 error,
51 error,
52 exchange,
52 exchange,
53 extensions,
53 extensions,
54 filemerge,
54 filemerge,
55 filesetlang,
55 filesetlang,
56 formatter,
56 formatter,
57 hg,
57 hg,
58 httppeer,
58 httppeer,
59 localrepo,
59 localrepo,
60 lock as lockmod,
60 lock as lockmod,
61 logcmdutil,
61 logcmdutil,
62 mergestate as mergestatemod,
62 mergestate as mergestatemod,
63 metadata,
63 metadata,
64 obsolete,
64 obsolete,
65 obsutil,
65 obsutil,
66 pathutil,
66 pathutil,
67 phases,
67 phases,
68 policy,
68 policy,
69 pvec,
69 pvec,
70 pycompat,
70 pycompat,
71 registrar,
71 registrar,
72 repair,
72 repair,
73 repoview,
73 repoview,
74 requirements,
74 requirements,
75 revlog,
75 revlog,
76 revlogutils,
76 revlogutils,
77 revset,
77 revset,
78 revsetlang,
78 revsetlang,
79 scmutil,
79 scmutil,
80 setdiscovery,
80 setdiscovery,
81 simplemerge,
81 simplemerge,
82 sshpeer,
82 sshpeer,
83 sslutil,
83 sslutil,
84 streamclone,
84 streamclone,
85 strip,
85 strip,
86 tags as tagsmod,
86 tags as tagsmod,
87 templater,
87 templater,
88 treediscovery,
88 treediscovery,
89 upgrade,
89 upgrade,
90 url as urlmod,
90 url as urlmod,
91 util,
91 util,
92 vfs as vfsmod,
92 vfs as vfsmod,
93 wireprotoframing,
93 wireprotoframing,
94 wireprotoserver,
94 wireprotoserver,
95 )
95 )
96 from .interfaces import repository
96 from .interfaces import repository
97 from .utils import (
97 from .utils import (
98 cborutil,
98 cborutil,
99 compression,
99 compression,
100 dateutil,
100 dateutil,
101 procutil,
101 procutil,
102 stringutil,
102 stringutil,
103 urlutil,
103 urlutil,
104 )
104 )
105
105
106 from .revlogutils import (
106 from .revlogutils import (
107 constants as revlog_constants,
107 constants as revlog_constants,
108 debug as revlog_debug,
108 debug as revlog_debug,
109 deltas as deltautil,
109 deltas as deltautil,
110 nodemap,
110 nodemap,
111 rewrite,
111 rewrite,
112 sidedata,
112 sidedata,
113 )
113 )
114
114
# Convenience alias used throughout this module for releasing locks.
release = lockmod.release

# Command table for all debug* commands; seeded with the commands that the
# strip module registers so they share one table, then extended via the
# @command decorator below.
table = {}
table.update(strip.command._table)
command = registrar.command(table)
120
120
121
121
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    # Guard against bad usage first: exactly two or three positionals.
    if len(args) not in (2, 3):
        raise error.Abort(_(b'either two or three arguments required'))
    if len(args) == 3:
        # An explicit index file was given: open it as a standalone revlog
        # rooted at the current working directory (unaudited vfs).
        index, rev1, rev2 = args
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = rlog.lookup
    else:
        # Two arguments: resolve revisions against the current repository's
        # changelog, which requires an actual repository.
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        rlog = repo.changelog
        lookup = repo.lookup
    anc = rlog.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (rlog.rev(anc), hex(anc)))
141
141
142
142
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # Mercurial internals use bytes for all vfs paths; the original passed
    # a unicode str literal here, inconsistent with the rest of this file's
    # strict b'...' convention — use byte literals for the scratch filename.
    with repo.cachevfs.open(b'eicar-test-file.com', b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join(b'eicar-test-file.com'))
158
158
159
159
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # NOTE(review): the file object is never explicitly closed here;
    # presumably the unbundler returned by readbundle takes ownership of the
    # stream — confirm before wrapping this in a 'with' block.
    f = hg.openpath(ui, fname)
    gen = exchange.readbundle(ui, f, fname)
    gen.apply(repo)
166
166
167
167
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
        (
            b'',
            b'from-existing',
            None,
            _(b'continue from a non-empty repository'),
        ),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
    from_existing=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    # Unless --from-existing was given, refuse to build on top of history.
    cl = repo.changelog
    if len(cl) > 0 and not from_existing:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG
    # (first parse pass: only count 'n' node events for the progress total)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [b'%d' % i for i in range(0, total * linesperrev)]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    # Second parse pass actually commits each node inside a single
    # transaction, holding both the working-copy and store locks.
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1  # rev id of the most recently committed node
        atbranch = b'default'
        nodeids = []  # node hash per rev id, for backref resolution
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    # Maintain the shared "mf" file so that merges of it
                    # produce clean (automatically resolvable) merges.
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [
                            l.strip()
                            for l in simplemerge.render_minimized(m3)[0]
                        ]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # Tag this rev's line so every revision modifies "mf".
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    # "of" is fully rewritten by every revision.
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    # One fresh "nf<id>" file per revision; merges also carry
                    # forward the second parent's nf* files.
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    # Memctx callback: serve content for the files we staged.
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                # Translate dagparser backrefs (rev ids) into node hashes.
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # Local tag event: remember it for .hg/localtags below.
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                # Branch event: subsequent commits land on this named branch.
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    # Write accumulated tags as local tags (not versioned .hgtags entries).
    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
351
351
352
352
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """Dump the contents of changegroup 'gen' to the ui.

    With 'all' set, print every delta chunk (node, parents, cset, delta
    base and delta length) of the changelog, the manifest, and each
    filelog.  Otherwise print only the changelog node hashes.  'indent'
    prefixes every output line with that many spaces (used when nested
    inside bundle2 part output).
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # Print one section header, then one line per delta chunk.
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        # filelogheader() returns {} at the end of the stream, which
        # terminates this iter() loop.
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
392
392
393
393
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # Marker format newer than this client understands: report and stop.
        msg = b"%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = b"%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        # Reuse the debugobsolete formatter so output matches that command.
        fm = ui.formatter(b'debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()
416
416
417
417
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads decoded from binary 'data'"""
    pad = b' ' * indent
    decoded = phases.binarydecode(data)
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for node in decoded[phase]:
            ui.write(pad)
            ui.write(b'%s %s\n' % (hex(node), phasename))
426
426
427
427
def _quasirepr(thing):
    """Return a bytes repr of 'thing'; mappings get a deterministic
    '{k: v, ...}' form with keys in sorted order."""
    mapping_types = (dict, util.sortdict, collections.OrderedDict)
    if not isinstance(thing, mapping_types):
        return pycompat.bytestr(repr(thing))
    entries = [b'%s: %s' % (key, thing[key]) for key in sorted(thing)]
    return b'{%s}' % b', '.join(entries)
434
434
435
435
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    # Optional filter: only show parts whose type was named via --part-type.
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        # For known part types, recurse into the payload (indented 4 spaces)
        # unless --quiet suppresses the detail.
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
458
458
459
459
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # --spec: just print the bundle specification and exit.
            spec = exchange.getbundlespec(ui, f)
            ui.write(b'%s\n' % spec)
            return

        # Dispatch on bundle format: bundle2 gets the part-aware dumper,
        # everything else is treated as a plain changegroup.
        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
482
482
483
483
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    byteopts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, byteopts, path)
    try:
        # Main (wire protocol) capabilities, one per line.
        caps = sorted(peer.capabilities())
        ui.writenoi18n(b'Main capabilities:\n')
        for cap in caps:
            ui.write(b'  %s\n' % cap)
        # Bundle2 capabilities, if advertised: key then its values, nested.
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for capkey, capvalues in sorted(b2caps.items()):
                ui.write(b'  %s\n' % capkey)
                for capvalue in capvalues:
                    ui.write(b'    %s\n' % capvalue)
    finally:
        peer.close()
503
503
504
504
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = logcmdutil.revsingle(repo, rev, None)
    files = None

    if opts['compute']:
        # --compute: recompute the changed-files info from the revision
        # itself instead of trusting what is stored.
        files = metadata.compute_all_files_changes(ctx)
    else:
        # Default: read the changed-files block from changelog sidedata,
        # if this repository stores one for the revision.
        sd = repo.changelog.sidedata(ctx.rev())
        files_block = sd.get(sidedata.SD_FILES)
        if files_block is not None:
            files = metadata.decode_files_sidedata(sd)
    if files is not None:
        for f in sorted(files.touched):
            # Classify each touched file; 'touched' is the fallback when
            # none of the more specific categories apply.
            if f in files.added:
                action = b"added"
            elif f in files.removed:
                action = b"removed"
            elif f in files.merged:
                action = b"merged"
            elif f in files.salvaged:
                action = b"salvaged"
            else:
                action = b"touched"

            # Copy tracing: note which parent the file was copied from
            # and the source path, when recorded.
            copy_parent = b""
            copy_source = b""
            if f in files.copied_from_p1:
                copy_parent = b"p1"
                copy_source = files.copied_from_p1[f]
            elif f in files.copied_from_p2:
                copy_parent = b"p2"
                copy_source = files.copied_from_p2[f]

            data = (action, copy_parent, f, copy_source)
            template = b"%-8s %2s: %s, %s;\n"
            ui.write(template % data)
554
554
555
555
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    p1, p2 = repo.dirstate.parents()
    manifest1 = repo[p1].manifest()
    manifest2 = repo[p2].manifest()
    # Report every inconsistency, then abort if any were found.
    problem_count = 0
    for problem in repo.dirstate.verify(manifest1, manifest2):
        ui.warn(problem[0] % problem[1:])
        problem_count += 1
    if problem_count:
        raise error.Abort(
            _(b".hg/dirstate inconsistent with current parent's manifest")
        )
569
569
570
570
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    mode = stringutil.pprint(ui._colormode)
    ui.writenoi18n(b'color mode: %s\n' % mode)
    # --style lists configured styles; otherwise list available colors.
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
583
583
584
584
def _debugdisplaycolor(ui):
    """Print every available color/effect name, each rendered in itself."""
    # Work on a copy so the caller's ui styles are untouched; replace its
    # style table with one entry per active effect (labelled as itself so
    # each line below renders in the color it names).
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        # Terminfo mode: also expose user-configured color./terminfo. keys,
        # stripped of their prefix.
        for k, v in ui.configitems(b'color'):
            if k.startswith(b'color.'):
                ui._styles[k] = k[6:]
            elif k.startswith(b'terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_(b'available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(b'%s\n' % colorname, label=label)
601
601
602
602
def _debugdisplaystyle(ui):
    """Print every configured style label with its effects, column-aligned."""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # Pad each label to the longest one so the effect column lines up.
    longest = max(len(name) for name in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            ui.write(b': ')
            ui.write(b' ' * (max(0, longest - len(label))))
            rendered = [ui.label(e, e) for e in effects.split()]
            ui.write(b', '.join(rendered))
        ui.write(b'\n')
616
616
617
617
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        msg = _(
            b'(warning: stream clone bundle will contain secret '
            b'revisions)\n'
        )
        ui.warn(msg)

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    reqs = b', '.join(sorted(requirements))
    ui.write(_(b'bundle requirements: %s\n') % reqs)
639
639
640
640
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # Operate on an arbitrary revlog index file given on the command
        # line, opened relative to the current working directory.
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # Yield a 'n'(ode) event for every revision, plus a 'l'(abel)
            # event for each revision explicitly listed on the command line.
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # Map each tagged revision to the list of its tag names.
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            # Yield 'a'(nnotation) events on branch changes (with -b),
            # 'n'(ode) events for every changelog revision, and 'l'(abel)
            # events for tags (with -t).
            b = b"default"
            for r in cl:
                if branches:
                    # Field 5 of the parsed changelog entry is the extras
                    # dict, which carries the branch name.
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
    ui.write(b"\n")
710
710
711
711
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the positional FILE argument is actually the revision.
    selector = (
        opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir')
    )
    if selector:
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(r.rawdata(r.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
727
727
728
728
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # With -e/--extended, also try the more permissive date formats.
    formats = dateutil.extendeddateformats if opts["extended"] else None
    if formats is not None:
        d = dateutil.parsedate(date, formats)
    else:
        d = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % d)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
    if range:
        m = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % m(d[0]))
747
747
748
748
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``:       revision number
    :``p1``:        parent 1 revision number (for reference)
    :``p2``:        parent 2 revision number (for reference)
    :``chainid``:   delta chain identifier (numbered by unique base)
    :``chainlen``:  delta chain length to this revision
    :``prevrev``:   previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
                    - base:  a full snapshot
                    - snap:  an intermediate snapshot
                    - p1:    a delta against the first parent
                    - p2:    a delta against the second parent
                    - skip1: a delta against the same base as p1
                             (when p1 has an empty delta)
                    - skip2: a delta against the same base as p2
                             (when p2 has an empty delta)
                    - prev:  a delta against the previous revision
                    - other: a delta against an arbitrary revision
    :``compsize``:   compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``:  total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``:    linear distance from base revision in delta chain to end
                     of this revision
    :``extradist``:  total size of revisions not part of this delta chain from
                     base of delta chain to end of this revision; a measurement
                     of how much extra data we need to read/seek across to read
                     the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``:     total size of data read from the disk for a revision
                       (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``:  density of useful bytes in the data read from the disk
    :``srchunks``:     in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    # Bind frequently-used revlog accessors to locals for the per-rev loop.
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    withsparseread = getattr(r, '_withsparseread', False)

    # security to avoid crash on corrupted revlogs
    total_revs = len(index)

    def revinfo(rev):
        # Collect per-revision delta statistics straight from the index entry.
        e = index[rev]
        compsize = e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
        uncompsize = e[revlog_constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]
        chainsize = 0

        base = e[revlog_constants.ENTRY_DELTA_BASE]
        p1 = e[revlog_constants.ENTRY_PARENT_1]
        p2 = e[revlog_constants.ENTRY_PARENT_2]

        # If the parents of a revision has an empty delta, we never try to delta
        # against that parent, but directly against the delta base of that
        # parent (recursively). It avoids adding a useless entry in the chain.
        #
        # However we need to detect that as a special case for delta-type, that
        # is not simply "other".
        p1_base = p1
        if p1 != nullrev and p1 < total_revs:
            e1 = index[p1]
            # Walk past ancestors whose delta is empty (compressed size 0),
            # stopping at a fixed point, nullrev, or an out-of-range base.
            while e1[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e1[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p1_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p1_base = new_base
                e1 = index[p1_base]
        p2_base = p2
        if p2 != nullrev and p2 < total_revs:
            e2 = index[p2]
            # Same empty-delta walk as above, for the second parent.
            while e2[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e2[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p2_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p2_base = new_base
                e2 = index[p2_base]

        # Classify the delta; order matters (p1/p2 before skip1/skip2 etc.).
        if generaldelta:
            if base == p1:
                deltatype = b'p1'
            elif base == p2:
                deltatype = b'p2'
            elif base == rev:
                deltatype = b'base'
            elif base == p1_base:
                deltatype = b'skip1'
            elif base == p2_base:
                deltatype = b'skip2'
            elif r.issnapshot(rev):
                deltatype = b'snap'
            elif base == rev - 1:
                deltatype = b'prev'
            else:
                deltatype = b'other'
        else:
            # Without general delta, a revision is either a full text or a
            # delta against the immediately preceding revision.
            if base == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        # Sum the compressed size of every revision in the delta chain.
        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]

        return p1, p2, compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b'    rev  p1  p2  chain# chainlen     prev   delta       '
        b'size    rawsize  chainsize     ratio   lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b'   readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    # Chains are numbered by order of first appearance of their base.
    chainbases = {}
    for rev in r:
        p1, p2, comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        # Linear span on disk from the chain base to the end of this rev.
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # Chain of length one: no previous revision.
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev p1 p2 chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            p1,
            p2,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            # Simulate a sparse read of the chain and report how much data
            # would actually be fetched from disk.
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
989
989
990
990
@command(
    b'debug-delta-find',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE REV'),
    optionalrepo=True,
)
def debugdeltafind(ui, repo, arg_1, arg_2=None, **opts):
    """display the computation to get to a valid delta for storing REV

    This command will replay the process used to find the "best" delta to store
    a revision and display information about all the steps used to get to that
    result.

    The revision uses the revision number of the target storage (not changelog
    revision number).

    note: the process is initiated from a full text of the revision to store.
    """
    opts = pycompat.byteskwargs(opts)
    # With one positional argument it is REV; with two, FILE then REV.
    if arg_2 is None:
        file_ = None
        rev = arg_1
    else:
        file_ = arg_1
        rev = arg_2

    rev = int(rev)

    # Use a local name that does not shadow the module-level `revlog` import
    # (the original `revlog = cmdutil.openrevlog(...)` masked it), and pass
    # this command's own name so error messages mention `debug-delta-find`
    # rather than `debugdeltachain`.
    rlog = cmdutil.openrevlog(repo, b'debug-delta-find', file_, opts)

    deltacomputer = deltautil.deltacomputer(
        rlog,
        write_debug=ui.write,
        debug_search=True,
    )

    # Rebuild the revision information exactly as it would be presented when
    # first storing this revision (full text, no cached delta).
    node = rlog.node(rev)
    p1r, p2r = rlog.parentrevs(rev)
    p1 = rlog.node(p1r)
    p2 = rlog.node(p2r)
    btext = [rlog.revision(rev)]
    textlen = len(btext[0])
    cachedelta = None
    flags = rlog.flags(rev)

    revinfo = revlogutils.revisioninfo(
        node,
        p1,
        p2,
        btext,
        textlen,
        cachedelta,
        flags,
    )

    # Replay the delta search; the debug hooks above print each step.
    fh = rlog._datafp()
    deltacomputer.finddeltainfo(revinfo, fh, target_rev=rev)
1048
1048
1049
1049
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'docket',
            False,
            _(b'display the docket (metadata file) instead'),
        ),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    if opts.get("docket"):
        # --docket: dump the dirstate-v2 docket metadata instead of entries.
        if not repo.dirstate._use_dirstate_v2:
            raise error.Abort(_(b'dirstate v1 does not have a docket'))

        docket = repo.dirstate._map.docket
        (
            start_offset,
            root_nodes,
            nodes_with_entry,
            nodes_with_copy,
            unused_bytes,
            _unused,
            ignore_pattern,
        ) = dirstateutils.v2.TREE_METADATA.unpack(docket.tree_metadata)

        ui.write(_(b"size of dirstate data: %d\n") % docket.data_size)
        ui.write(_(b"data file uuid: %s\n") % docket.uuid)
        ui.write(_(b"start offset of root nodes: %d\n") % start_offset)
        ui.write(_(b"number of root nodes: %d\n") % root_nodes)
        ui.write(_(b"nodes with entries: %d\n") % nodes_with_entry)
        ui.write(_(b"nodes with copies: %d\n") % nodes_with_copy)
        ui.write(_(b"number of unused bytes: %d\n") % unused_bytes)
        ui.write(
            _(b"ignore pattern hash: %s\n") % binascii.hexlify(ignore_pattern)
        )
        return

    # --dates defaults to True; the deprecated --nodates flag overrides it.
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:

        def keyfunc(entry):
            # Sort by (mtime, filename) when --datesort is given.
            filename, _state, _mode, _size, mtime = entry
            return (mtime, filename)

    else:
        keyfunc = None  # sort by filename
    entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
    entries.sort(key=keyfunc)
    for entry in entries:
        filename, state, mode, size, mtime = entry
        # The literal placeholders below are padded to the width of the
        # "%Y-%m-%d %H:%M:%S " timestamp so columns stay aligned.
        if mtime == -1:
            timestr = b'unset               '
        elif nodates:
            timestr = b'set                 '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
            timestr = encoding.strtolocal(timestr)
        if mode & 0o20000:
            # Symlink bit set in the recorded mode.
            mode = b'lnk'
        else:
            mode = b'%3o' % (mode & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1137
1137
1138
1138
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v1
    """
    # Only the dirstate-v2 docket records an ignore-pattern hash; on v1
    # this command prints nothing.
    if repo.dirstate._use_dirstate_v2:
        docket = repo.dirstate._map.docket
        hash_len = 20  # 160 bits for SHA-1
        # The hash is the trailing bytes of the tree metadata blob.
        hash_bytes = docket.tree_metadata[-hash_len:]
        ui.write(binascii.hexlify(hash_bytes) + b'\n')
1153
1153
1154
1154
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed used for discovery'),
        (
            b'',
            b'local-as-revs',
            b"",
            b'treat local as having these revisions only',
        ),
        (
            b'',
            b'remote-as-revs',
            b"",
            b'use local as remote, with only these revisions',
        ),
    ]
    + cmdutil.remoteopts
    + cmdutil.formatteropts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation

    The local peer can be "replaced" by a subset of the local repository by
    using the `--local-as-revs` flag. In the same way, the usual `remote` peer
    can be "replaced" by a subset of the local repository using the
    `--remote-as-revs` flag. This is useful to efficiently debug pathological
    discovery situations.

    The following developer oriented config are relevant for people playing with this command:

    * devel.discovery.exchange-heads=True

      If False, the discovery will not start with
      remote head fetching and local head querying.

    * devel.discovery.grow-sample=True

      If False, the sample size used in set discovery will not be increased
      through the process

    * devel.discovery.grow-sample.dynamic=True

      When discovery.grow-sample.dynamic is True, the default, the sample size is
      adapted to the shape of the undecided set (it is set to the max of:
      <target-size>, len(roots(undecided)), len(heads(undecided)))

    * devel.discovery.grow-sample.rate=1.05

      the rate at which the sample grows

    * devel.discovery.randomize=True

      If False, random samplings during discovery are deterministic. It is
      meant for integration tests.

    * devel.discovery.sample-size=200

      Control the initial size of the discovery sample

    * devel.discovery.sample-size.initial=100

      Control the initial size of the discovery for initial change
    """
    opts = pycompat.byteskwargs(opts)
    unfi = repo.unfiltered()

    # setup potential extra filtering
    local_revs = opts[b"local_as_revs"]
    remote_revs = opts[b"remote_as_revs"]

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    if not remote_revs:
        # genuine remote peer: resolve the pull path and connect to it
        remoteurl, branches = urlutil.get_unique_pull_path(
            b'debugdiscovery', repo, ui, remoteurl
        )
        remote = hg.peer(repo, opts, remoteurl)
        ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
    else:
        # simulated remote: the local repo filtered down to the requested
        # revisions plays the role of the remote peer
        branches = (None, [])
        remote_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % remote_revs]
        )
        remote_filtered_revs = frozenset(remote_filtered_revs)

        def remote_func(x):
            return remote_filtered_revs

        repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func

        remote = repo.peer()
        remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')

    if local_revs:
        # restrict the local side to the requested revisions via a dedicated
        # repoview filter
        local_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % local_revs]
        )
        local_filtered_revs = frozenset(local_filtered_revs)

        def local_func(x):
            return local_filtered_revs

        repoview.filtertable[b'debug-discovery-local-filter'] = local_func
        repo = repo.filtered(b'debug-discovery-local-filter')

    data = {}
    if opts.get(b'old'):
        # legacy tree-walking discovery protocol

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            if remote_revs:
                r = remote._repo.filtered(b'debug-discovery-remote-filter')
                remote._repo = r
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True, audit=data
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            # reduce the common set to the heads of its ancestry
            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:
        # modern set-based sampling discovery

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = logcmdutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, anyincoming, hds = setdiscovery.findcommonheads(
                ui,
                repo,
                remote,
                ancestorsof=nodes,
                audit=data,
                abortwhenunrelated=False,
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']

    fm = ui.formatter(b'debugdiscovery', opts)
    if fm.strict_format:
        # machine-readable output (e.g. JSON): capture the protocol chatter
        # into the formatter data instead of letting it hit the terminal

        @contextlib.contextmanager
        def may_capture_output():
            ui.pushbuffer()
            yield
            data[b'output'] = ui.popbuffer()

    else:
        may_capture_output = util.nullcontextmanager
    with may_capture_output():
        with util.timedcm('debug-discovery') as t:
            common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    if len(common) == 1 and repo.nullid in common:
        # nullid alone means "nothing in common"
        common = set()
    heads_common = set(common)
    heads_remote = set(hds)
    heads_local = set(repo.heads())
    # note: they cannot be a local or remote head that is in common and not
    # itself a head of common.
    heads_common_local = heads_common & heads_local
    heads_common_remote = heads_common & heads_remote
    heads_common_both = heads_common & heads_remote & heads_local

    # `all`/`any` renamed to avoid shadowing the builtins
    all_revs = repo.revs(b'all()')
    common = repo.revs(b'::%ln', common)
    roots_common = repo.revs(b'roots(::%ld)', common)
    missing = repo.revs(b'not ::%ld', common)
    heads_missing = repo.revs(b'heads(%ld)', missing)
    roots_missing = repo.revs(b'roots(%ld)', missing)
    assert len(common) + len(missing) == len(all_revs)

    # the set neither known-common nor descended from a common local head:
    # what discovery initially has to decide about
    initial_undecided = repo.revs(
        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
    )
    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
    common_initial_undecided = initial_undecided & common
    missing_initial_undecided = initial_undecided & missing

    data[b'elapsed'] = t.elapsed
    data[b'nb-common-heads'] = len(heads_common)
    data[b'nb-common-heads-local'] = len(heads_common_local)
    data[b'nb-common-heads-remote'] = len(heads_common_remote)
    data[b'nb-common-heads-both'] = len(heads_common_both)
    data[b'nb-common-roots'] = len(roots_common)
    data[b'nb-head-local'] = len(heads_local)
    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
    data[b'nb-head-remote'] = len(heads_remote)
    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
        heads_common_remote
    )
    data[b'nb-revs'] = len(all_revs)
    data[b'nb-revs-common'] = len(common)
    data[b'nb-revs-missing'] = len(missing)
    data[b'nb-missing-heads'] = len(heads_missing)
    data[b'nb-missing-roots'] = len(roots_missing)
    data[b'nb-ini_und'] = len(initial_undecided)
    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
    data[b'nb-ini_und-common'] = len(common_initial_undecided)
    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)

    fm.startitem()
    fm.data(**pycompat.strkwargs(data))
    # display discovery summary
    fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
    fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
    # per-phase round-trip/query counters are only present for some
    # discovery variants, hence the membership tests
    if b'total-round-trips-heads' in data:
        fm.plain(
            b" round-trips-heads: %(total-round-trips-heads)9d\n" % data
        )
    if b'total-round-trips-branches' in data:
        fm.plain(
            b" round-trips-branches: %(total-round-trips-branches)9d\n"
            % data
        )
    if b'total-round-trips-between' in data:
        fm.plain(
            b" round-trips-between: %(total-round-trips-between)9d\n" % data
        )
    fm.plain(b"queries: %(total-queries)9d\n" % data)
    if b'total-queries-branches' in data:
        fm.plain(b" queries-branches: %(total-queries-branches)9d\n" % data)
    if b'total-queries-between' in data:
        fm.plain(b" queries-between: %(total-queries-between)9d\n" % data)
    fm.plain(b"heads summary:\n")
    fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
    fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
    fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
    fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
    fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
    fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
    fm.plain(b" common: %(nb-revs-common)9d\n" % data)
    fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
    fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
    fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
    fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
    fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
    fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
    fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)

    if ui.verbose:
        fm.plain(
            b"common heads: %s\n"
            % b" ".join(sorted(short(n) for n in heads_common))
        )
    fm.end()
1436
1436
1437
1437
# 4 KiB chunk size used by debugdownload for buffered reads/writes
_chunksize = 4 << 10
1439
1439
1440
1440
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config"""
    # urlmod.open applies Mercurial's proxy/auth configuration; the returned
    # object is assumed file-like with read()/close() — TODO confirm for all
    # handlers.
    fh = urlmod.open(ui, url, output)
    try:
        # default to streaming the payload straight to the ui
        dest = ui
        if output:
            dest = open(output, b"wb", _chunksize)
        try:
            data = fh.read(_chunksize)
            while data:
                dest.write(data)
                data = fh.read(_chunksize)
        finally:
            # only close when we opened a real file; `ui` is not closable
            if output:
                dest.close()
    finally:
        # fix: the source handle was previously leaked on every call
        fh.close()
1463
1463
1464
1464
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    # iterate extensions sorted by name for stable output
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            # frozen (PyOxidizer) build: modules have no __file__, report the
            # executable instead
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            fm.write(b'name', b'%s', extname)
            # annotate the name with a compatibility marker in normal mode
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        # the remaining fields are only rendered in verbose mode
        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b' location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b' tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b' bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1526
1526
1527
1527
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    # imported lazily to avoid a module-level import cycle
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)

    # transformation pipeline applied to the parsed tree, in order
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    # which stages to dump, driven by --show-stage (or --verbose)
    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # collect the candidate file names the matcher will be tested against
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        # include working-directory files (tracked, unknown and ignored)
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    # --show-matcher defaults to None, which means "only under --verbose"
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    # print the candidates accepted by the matcher, sorted for stable output
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1623
1623
1624
1624
@command(
    b"debug-repair-issue6528",
    [
        (
            b'',
            b'to-report',
            b'',
            _(b'build a report of affected revisions to this file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'from-report',
            b'',
            _(b'repair revisions listed in this report file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'paranoid',
            False,
            _(b'check that both detection methods do the same thing'),
        ),
    ]
    + cmdutil.dryrunopts,
)
def debug_repair_issue6528(ui, repo, **opts):
    """find affected revisions and repair them. See issue6528 for more details.

    The `--to-report` and `--from-report` flags allow you to cache and reuse the
    computation of affected revisions for a given repository across clones.
    The report format is line-based (with empty lines ignored):

    ```
    <ascii-hex of the affected revision>,... <unencoded filelog index filename>
    ```

    There can be multiple broken revisions per filelog, they are separated by
    a comma with no spaces. The only space is between the revision(s) and the
    filename.

    Note that this does *not* mean that this repairs future affected revisions,
    that needs a separate fix at the exchange level that was introduced in
    Mercurial 5.9.1.

    There is a `--paranoid` flag to test that the fast implementation is correct
    by checking it against the slow implementation. Since this matter is quite
    urgent and testing every edge-case is probably quite costly, we use this
    method to test on large repositories as a fuzzing method of sorts.
    """
    # --to-report computes a fresh report and must not be combined with
    # flags that would consume or discard it
    cmdutil.check_incompatible_arguments(
        opts, 'to_report', ['from_report', 'dry_run']
    )
    # TODO maybe add filelog pattern and revision pattern parameters to help
    # narrow down the search for users that know what they're looking for?

    if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
        msg = b"can only repair revlogv1 repositories, v2 is not affected"
        raise error.Abort(_(msg))

    # all the heavy lifting happens in the rewrite module; we only forward
    # the relevant command-line switches
    rewrite.repair_issue6528(
        ui,
        repo,
        dry_run=opts.get('dry_run'),
        to_report=opts.get('to_report'),
        from_report=opts.get('from_report'),
        paranoid=opts.get('paranoid'),
    )
1697
1697
1698
1698
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # width of the name column: the widest variant name, but never narrower
    # than the b'format-variant' header itself
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # format string that pads the variant name out to the column width
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # plain output: pass bytes-like values through unchanged and
            # render anything else (booleans) as yes/no
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        # structured output (e.g. templated/JSON) keeps raw values
        formatvalue = pycompat.identity

    # header row; the config/default columns only appear with --verbose
    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # choose labels so the UI can highlight whether the repository's
        # actual format disagrees with the config or with Mercurial's default
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        # a config value that differs from the default gets its own label
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1769
1769
1770
1770
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def yesno(flag):
        # render a boolean probe result the way this command always has
        return b'yes' if flag else b'no'

    ui.writenoi18n(b'path: %s\n' % path)
    mountpoint = util.getfsmountpoint(path) or b'(unknown)'
    ui.writenoi18n(b'mounted on: %s\n' % mountpoint)
    ui.writenoi18n(b'exec: %s\n' % yesno(util.checkexec(path)))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(b'symlink: %s\n' % yesno(util.checklink(path)))
    ui.writenoi18n(b'hardlink: %s\n' % yesno(util.checknlink(path)))
    casesensitive = b'(unknown)'
    try:
        # probing case sensitivity requires creating a temporary file in
        # the target directory; failure (e.g. read-only dir) is tolerated
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1793
1793
1794
1794
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    # assemble the getbundle() arguments from the hex node ids given on
    # the command line
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = peer.getbundle(b'debug', **args)

    # map the user-facing compression name onto an on-disk bundle type
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = btypes.get(opts.get(b'type', b'bzip2').lower())
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1841
1841
1842
1842
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            # `ignored` is the path that actually matched an ignore rule:
            # either the file itself or one of its ancestor directories
            ignored = None
            ignoredata = None
            if nf != b'.':
                if ignore(nf):
                    # the file matches an ignore pattern directly
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    # otherwise check whether a containing directory is
                    # ignored, which ignores the file transitively
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                # report which ignore file and line produced the match
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1891
1891
1892
1892
@command(
    b'debug-revlog-index|debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a revlog"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
    # unwrap storage objects that expose their underlying revlog via
    # `_revlog`; a bare revlog is used as-is
    target = getattr(store, b'_revlog', store)
    formatter = ui.formatter(b'debugindex', opts)
    return revlog_debug.debug_index(
        ui,
        repo,
        formatter=formatter,
        revlog=target,
        full_node=ui.debugflag,
    )
1914
1914
1915
1915
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    rlog = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for rev in rlog:
        parents = rlog.parents(rlog.node(rev))
        # first parent edge is always emitted; the second parent edge is
        # only emitted when it is a real (non-null) parent
        ui.write(b"\t%d -> %d\n" % (rlog.rev(parents[0]), rev))
        if parents[1] != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (rlog.rev(parents[1]), rev))
    ui.write(b"}\n")
1934
1934
1935
1935
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # touch the changelog first (presumably so the index is fully loaded
    # before stats are read) -- kept exactly as the original did
    repo.changelog.shortest(repo.nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    stats = index.stats()
    for name in sorted(stats):
        ui.write(b'%s: %d\n' % (name, stats[name]))
1945
1945
1946
1946
1947 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1947 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1948 def debuginstall(ui, **opts):
1948 def debuginstall(ui, **opts):
1949 """test Mercurial installation
1949 """test Mercurial installation
1950
1950
1951 Returns 0 on success.
1951 Returns 0 on success.
1952 """
1952 """
1953 opts = pycompat.byteskwargs(opts)
1953 opts = pycompat.byteskwargs(opts)
1954
1954
1955 problems = 0
1955 problems = 0
1956
1956
1957 fm = ui.formatter(b'debuginstall', opts)
1957 fm = ui.formatter(b'debuginstall', opts)
1958 fm.startitem()
1958 fm.startitem()
1959
1959
1960 # encoding might be unknown or wrong. don't translate these messages.
1960 # encoding might be unknown or wrong. don't translate these messages.
1961 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1961 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1962 err = None
1962 err = None
1963 try:
1963 try:
1964 codecs.lookup(pycompat.sysstr(encoding.encoding))
1964 codecs.lookup(pycompat.sysstr(encoding.encoding))
1965 except LookupError as inst:
1965 except LookupError as inst:
1966 err = stringutil.forcebytestr(inst)
1966 err = stringutil.forcebytestr(inst)
1967 problems += 1
1967 problems += 1
1968 fm.condwrite(
1968 fm.condwrite(
1969 err,
1969 err,
1970 b'encodingerror',
1970 b'encodingerror',
1971 b" %s\n (check that your locale is properly set)\n",
1971 b" %s\n (check that your locale is properly set)\n",
1972 err,
1972 err,
1973 )
1973 )
1974
1974
1975 # Python
1975 # Python
1976 pythonlib = None
1976 pythonlib = None
1977 if util.safehasattr(os, '__file__'):
1977 if util.safehasattr(os, '__file__'):
1978 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1978 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1979 elif getattr(sys, 'oxidized', False):
1979 elif getattr(sys, 'oxidized', False):
1980 pythonlib = pycompat.sysexecutable
1980 pythonlib = pycompat.sysexecutable
1981
1981
1982 fm.write(
1982 fm.write(
1983 b'pythonexe',
1983 b'pythonexe',
1984 _(b"checking Python executable (%s)\n"),
1984 _(b"checking Python executable (%s)\n"),
1985 pycompat.sysexecutable or _(b"unknown"),
1985 pycompat.sysexecutable or _(b"unknown"),
1986 )
1986 )
1987 fm.write(
1987 fm.write(
1988 b'pythonimplementation',
1988 b'pythonimplementation',
1989 _(b"checking Python implementation (%s)\n"),
1989 _(b"checking Python implementation (%s)\n"),
1990 pycompat.sysbytes(platform.python_implementation()),
1990 pycompat.sysbytes(platform.python_implementation()),
1991 )
1991 )
1992 fm.write(
1992 fm.write(
1993 b'pythonver',
1993 b'pythonver',
1994 _(b"checking Python version (%s)\n"),
1994 _(b"checking Python version (%s)\n"),
1995 (b"%d.%d.%d" % sys.version_info[:3]),
1995 (b"%d.%d.%d" % sys.version_info[:3]),
1996 )
1996 )
1997 fm.write(
1997 fm.write(
1998 b'pythonlib',
1998 b'pythonlib',
1999 _(b"checking Python lib (%s)...\n"),
1999 _(b"checking Python lib (%s)...\n"),
2000 pythonlib or _(b"unknown"),
2000 pythonlib or _(b"unknown"),
2001 )
2001 )
2002
2002
2003 try:
2003 try:
2004 from . import rustext # pytype: disable=import-error
2004 from . import rustext # pytype: disable=import-error
2005
2005
2006 rustext.__doc__ # trigger lazy import
2006 rustext.__doc__ # trigger lazy import
2007 except ImportError:
2007 except ImportError:
2008 rustext = None
2008 rustext = None
2009
2009
2010 security = set(sslutil.supportedprotocols)
2010 security = set(sslutil.supportedprotocols)
2011 if sslutil.hassni:
2011 if sslutil.hassni:
2012 security.add(b'sni')
2012 security.add(b'sni')
2013
2013
2014 fm.write(
2014 fm.write(
2015 b'pythonsecurity',
2015 b'pythonsecurity',
2016 _(b"checking Python security support (%s)\n"),
2016 _(b"checking Python security support (%s)\n"),
2017 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
2017 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
2018 )
2018 )
2019
2019
2020 # These are warnings, not errors. So don't increment problem count. This
2020 # These are warnings, not errors. So don't increment problem count. This
2021 # may change in the future.
2021 # may change in the future.
2022 if b'tls1.2' not in security:
2022 if b'tls1.2' not in security:
2023 fm.plain(
2023 fm.plain(
2024 _(
2024 _(
2025 b' TLS 1.2 not supported by Python install; '
2025 b' TLS 1.2 not supported by Python install; '
2026 b'network connections lack modern security\n'
2026 b'network connections lack modern security\n'
2027 )
2027 )
2028 )
2028 )
2029 if b'sni' not in security:
2029 if b'sni' not in security:
2030 fm.plain(
2030 fm.plain(
2031 _(
2031 _(
2032 b' SNI not supported by Python install; may have '
2032 b' SNI not supported by Python install; may have '
2033 b'connectivity issues with some servers\n'
2033 b'connectivity issues with some servers\n'
2034 )
2034 )
2035 )
2035 )
2036
2036
2037 fm.plain(
2037 fm.plain(
2038 _(
2038 _(
2039 b"checking Rust extensions (%s)\n"
2039 b"checking Rust extensions (%s)\n"
2040 % (b'missing' if rustext is None else b'installed')
2040 % (b'missing' if rustext is None else b'installed')
2041 ),
2041 ),
2042 )
2042 )
2043
2043
2044 # TODO print CA cert info
2044 # TODO print CA cert info
2045
2045
2046 # hg version
2046 # hg version
2047 hgver = util.version()
2047 hgver = util.version()
2048 fm.write(
2048 fm.write(
2049 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
2049 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
2050 )
2050 )
2051 fm.write(
2051 fm.write(
2052 b'hgverextra',
2052 b'hgverextra',
2053 _(b"checking Mercurial custom build (%s)\n"),
2053 _(b"checking Mercurial custom build (%s)\n"),
2054 b'+'.join(hgver.split(b'+')[1:]),
2054 b'+'.join(hgver.split(b'+')[1:]),
2055 )
2055 )
2056
2056
2057 # compiled modules
2057 # compiled modules
2058 hgmodules = None
2058 hgmodules = None
2059 if util.safehasattr(sys.modules[__name__], '__file__'):
2059 if util.safehasattr(sys.modules[__name__], '__file__'):
2060 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
2060 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
2061 elif getattr(sys, 'oxidized', False):
2061 elif getattr(sys, 'oxidized', False):
2062 hgmodules = pycompat.sysexecutable
2062 hgmodules = pycompat.sysexecutable
2063
2063
2064 fm.write(
2064 fm.write(
2065 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
2065 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
2066 )
2066 )
2067 fm.write(
2067 fm.write(
2068 b'hgmodules',
2068 b'hgmodules',
2069 _(b"checking installed modules (%s)...\n"),
2069 _(b"checking installed modules (%s)...\n"),
2070 hgmodules or _(b"unknown"),
2070 hgmodules or _(b"unknown"),
2071 )
2071 )
2072
2072
2073 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
2073 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
2074 rustext = rustandc # for now, that's the only case
2074 rustext = rustandc # for now, that's the only case
2075 cext = policy.policy in (b'c', b'allow') or rustandc
2075 cext = policy.policy in (b'c', b'allow') or rustandc
2076 nopure = cext or rustext
2076 nopure = cext or rustext
2077 if nopure:
2077 if nopure:
2078 err = None
2078 err = None
2079 try:
2079 try:
2080 if cext:
2080 if cext:
2081 from .cext import ( # pytype: disable=import-error
2081 from .cext import ( # pytype: disable=import-error
2082 base85,
2082 base85,
2083 bdiff,
2083 bdiff,
2084 mpatch,
2084 mpatch,
2085 osutil,
2085 osutil,
2086 )
2086 )
2087
2087
2088 # quiet pyflakes
2088 # quiet pyflakes
2089 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
2089 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
2090 if rustext:
2090 if rustext:
2091 from .rustext import ( # pytype: disable=import-error
2091 from .rustext import ( # pytype: disable=import-error
2092 ancestor,
2092 ancestor,
2093 dirstate,
2093 dirstate,
2094 )
2094 )
2095
2095
2096 dir(ancestor), dir(dirstate) # quiet pyflakes
2096 dir(ancestor), dir(dirstate) # quiet pyflakes
2097 except Exception as inst:
2097 except Exception as inst:
2098 err = stringutil.forcebytestr(inst)
2098 err = stringutil.forcebytestr(inst)
2099 problems += 1
2099 problems += 1
2100 fm.condwrite(err, b'extensionserror', b" %s\n", err)
2100 fm.condwrite(err, b'extensionserror', b" %s\n", err)
2101
2101
2102 compengines = util.compengines._engines.values()
2102 compengines = util.compengines._engines.values()
2103 fm.write(
2103 fm.write(
2104 b'compengines',
2104 b'compengines',
2105 _(b'checking registered compression engines (%s)\n'),
2105 _(b'checking registered compression engines (%s)\n'),
2106 fm.formatlist(
2106 fm.formatlist(
2107 sorted(e.name() for e in compengines),
2107 sorted(e.name() for e in compengines),
2108 name=b'compengine',
2108 name=b'compengine',
2109 fmt=b'%s',
2109 fmt=b'%s',
2110 sep=b', ',
2110 sep=b', ',
2111 ),
2111 ),
2112 )
2112 )
2113 fm.write(
2113 fm.write(
2114 b'compenginesavail',
2114 b'compenginesavail',
2115 _(b'checking available compression engines (%s)\n'),
2115 _(b'checking available compression engines (%s)\n'),
2116 fm.formatlist(
2116 fm.formatlist(
2117 sorted(e.name() for e in compengines if e.available()),
2117 sorted(e.name() for e in compengines if e.available()),
2118 name=b'compengine',
2118 name=b'compengine',
2119 fmt=b'%s',
2119 fmt=b'%s',
2120 sep=b', ',
2120 sep=b', ',
2121 ),
2121 ),
2122 )
2122 )
2123 wirecompengines = compression.compengines.supportedwireengines(
2123 wirecompengines = compression.compengines.supportedwireengines(
2124 compression.SERVERROLE
2124 compression.SERVERROLE
2125 )
2125 )
2126 fm.write(
2126 fm.write(
2127 b'compenginesserver',
2127 b'compenginesserver',
2128 _(
2128 _(
2129 b'checking available compression engines '
2129 b'checking available compression engines '
2130 b'for wire protocol (%s)\n'
2130 b'for wire protocol (%s)\n'
2131 ),
2131 ),
2132 fm.formatlist(
2132 fm.formatlist(
2133 [e.name() for e in wirecompengines if e.wireprotosupport()],
2133 [e.name() for e in wirecompengines if e.wireprotosupport()],
2134 name=b'compengine',
2134 name=b'compengine',
2135 fmt=b'%s',
2135 fmt=b'%s',
2136 sep=b', ',
2136 sep=b', ',
2137 ),
2137 ),
2138 )
2138 )
2139 re2 = b'missing'
2139 re2 = b'missing'
2140 if util._re2:
2140 if util._re2:
2141 re2 = b'available'
2141 re2 = b'available'
2142 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
2142 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
2143 fm.data(re2=bool(util._re2))
2143 fm.data(re2=bool(util._re2))
2144
2144
2145 # templates
2145 # templates
2146 p = templater.templatedir()
2146 p = templater.templatedir()
2147 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
2147 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
2148 fm.condwrite(not p, b'', _(b" no template directories found\n"))
2148 fm.condwrite(not p, b'', _(b" no template directories found\n"))
2149 if p:
2149 if p:
2150 (m, fp) = templater.try_open_template(b"map-cmdline.default")
2150 (m, fp) = templater.try_open_template(b"map-cmdline.default")
2151 if m:
2151 if m:
2152 # template found, check if it is working
2152 # template found, check if it is working
2153 err = None
2153 err = None
2154 try:
2154 try:
2155 templater.templater.frommapfile(m)
2155 templater.templater.frommapfile(m)
2156 except Exception as inst:
2156 except Exception as inst:
2157 err = stringutil.forcebytestr(inst)
2157 err = stringutil.forcebytestr(inst)
2158 p = None
2158 p = None
2159 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
2159 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
2160 else:
2160 else:
2161 p = None
2161 p = None
2162 fm.condwrite(
2162 fm.condwrite(
2163 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
2163 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
2164 )
2164 )
2165 fm.condwrite(
2165 fm.condwrite(
2166 not m,
2166 not m,
2167 b'defaulttemplatenotfound',
2167 b'defaulttemplatenotfound',
2168 _(b" template '%s' not found\n"),
2168 _(b" template '%s' not found\n"),
2169 b"default",
2169 b"default",
2170 )
2170 )
2171 if not p:
2171 if not p:
2172 problems += 1
2172 problems += 1
2173 fm.condwrite(
2173 fm.condwrite(
2174 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
2174 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
2175 )
2175 )
2176
2176
2177 # editor
2177 # editor
2178 editor = ui.geteditor()
2178 editor = ui.geteditor()
2179 editor = util.expandpath(editor)
2179 editor = util.expandpath(editor)
2180 editorbin = procutil.shellsplit(editor)[0]
2180 editorbin = procutil.shellsplit(editor)[0]
2181 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
2181 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
2182 cmdpath = procutil.findexe(editorbin)
2182 cmdpath = procutil.findexe(editorbin)
2183 fm.condwrite(
2183 fm.condwrite(
2184 not cmdpath and editor == b'vi',
2184 not cmdpath and editor == b'vi',
2185 b'vinotfound',
2185 b'vinotfound',
2186 _(
2186 _(
2187 b" No commit editor set and can't find %s in PATH\n"
2187 b" No commit editor set and can't find %s in PATH\n"
2188 b" (specify a commit editor in your configuration"
2188 b" (specify a commit editor in your configuration"
2189 b" file)\n"
2189 b" file)\n"
2190 ),
2190 ),
2191 not cmdpath and editor == b'vi' and editorbin,
2191 not cmdpath and editor == b'vi' and editorbin,
2192 )
2192 )
2193 fm.condwrite(
2193 fm.condwrite(
2194 not cmdpath and editor != b'vi',
2194 not cmdpath and editor != b'vi',
2195 b'editornotfound',
2195 b'editornotfound',
2196 _(
2196 _(
2197 b" Can't find editor '%s' in PATH\n"
2197 b" Can't find editor '%s' in PATH\n"
2198 b" (specify a commit editor in your configuration"
2198 b" (specify a commit editor in your configuration"
2199 b" file)\n"
2199 b" file)\n"
2200 ),
2200 ),
2201 not cmdpath and editorbin,
2201 not cmdpath and editorbin,
2202 )
2202 )
2203 if not cmdpath and editor != b'vi':
2203 if not cmdpath and editor != b'vi':
2204 problems += 1
2204 problems += 1
2205
2205
2206 # check username
2206 # check username
2207 username = None
2207 username = None
2208 err = None
2208 err = None
2209 try:
2209 try:
2210 username = ui.username()
2210 username = ui.username()
2211 except error.Abort as e:
2211 except error.Abort as e:
2212 err = e.message
2212 err = e.message
2213 problems += 1
2213 problems += 1
2214
2214
2215 fm.condwrite(
2215 fm.condwrite(
2216 username, b'username', _(b"checking username (%s)\n"), username
2216 username, b'username', _(b"checking username (%s)\n"), username
2217 )
2217 )
2218 fm.condwrite(
2218 fm.condwrite(
2219 err,
2219 err,
2220 b'usernameerror',
2220 b'usernameerror',
2221 _(
2221 _(
2222 b"checking username...\n %s\n"
2222 b"checking username...\n %s\n"
2223 b" (specify a username in your configuration file)\n"
2223 b" (specify a username in your configuration file)\n"
2224 ),
2224 ),
2225 err,
2225 err,
2226 )
2226 )
2227
2227
2228 for name, mod in extensions.extensions():
2228 for name, mod in extensions.extensions():
2229 handler = getattr(mod, 'debuginstall', None)
2229 handler = getattr(mod, 'debuginstall', None)
2230 if handler is not None:
2230 if handler is not None:
2231 problems += handler(ui, fm)
2231 problems += handler(ui, fm)
2232
2232
2233 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
2233 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
2234 if not problems:
2234 if not problems:
2235 fm.data(problems=problems)
2235 fm.data(problems=problems)
2236 fm.condwrite(
2236 fm.condwrite(
2237 problems,
2237 problems,
2238 b'problems',
2238 b'problems',
2239 _(b"%d problems detected, please check your install!\n"),
2239 _(b"%d problems detected, please check your install!\n"),
2240 problems,
2240 problems,
2241 )
2241 )
2242 fm.end()
2242 fm.end()
2243
2243
2244 return problems
2244 return problems
2245
2245
2246
2246
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    # Open the target as a peer so this also works against remote repos.
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    nodes = [bin(hexid) for hexid in ids]
    answers = peer.known(nodes)
    # Emit one '1'/'0' digit per queried node, in argument order.
    digits = b"".join(b"1" if known else b"0" for known in answers)
    ui.write(b"%s\n" % digits)
2260
2260
2261
2261
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # Thin shim kept only so old shell-completion scripts that still invoke
    # `hg debuglabelcomplete` keep working; the real implementation is
    # debugnamecomplete() below.
    debugnamecomplete(ui, repo, *args)
2266
2266
2267
2267
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # --force-free-*: unconditionally delete the lock file(s) and exit.
    # This bypasses the lock machinery entirely, hence "DANGEROUS".
    if opts.get('force_free_lock'):
        repo.svfs.tryunlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.tryunlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    # --set-lock / --set-wlock: acquire the requested lock(s) without
    # blocking (False) and hold them until the user tells us to stop.
    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            try:
                if ui.interactive():
                    # Interactive session: release on user confirmation.
                    prompt = _(b"ready to release the lock (y)? $$ &Yes")
                    ui.promptchoice(prompt)
                else:
                    # Non-interactive: spin until interrupted by a signal.
                    msg = b"%d locks held, waiting for signal\n"
                    msg %= len(locks)
                    ui.status(msg)
                    while True:  # XXX wait for a signal
                        time.sleep(0.1)
            except KeyboardInterrupt:
                msg = b"signal-received releasing locks\n"
                ui.status(msg)
            return 0
    finally:
        # Always release whatever we managed to acquire.
        release(*locks)

    # No modifying option given: report the current state of both locks.
    now = time.time()
    held = 0

    def report(vfs, name, method):
        """Print the state of one lock file; return 1 if held, 0 if free."""
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # We could acquire it, so it was free; release immediately.
            l.release()
        else:
            # Lock is held by someone else: report owner, host and age.
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    host, pid = locker.split(b':')
                    # Only mention the host when the lock was taken elsewhere.
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except FileNotFoundError:
                # Lock file vanished between the probe and the stat: free.
                pass

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2390
2390
2391
2391
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # Abort cleanly when the active manifest storage has no fulltext
        # cache (e.g. a non-revlog backend).
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        # Mutating the cache requires the working-state lock.
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
        return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read()  # stores revision in cache too
        return

    # No option given: dump the cache contents, most recent first.
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.peek to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2465
2465
2466
2466
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        # Report which on-disk merge-state format will actually be used.
        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # Default template for the plain-text rendering; --template/-T
        # overrides it, and the formatter also feeds JSON etc.
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # The two merge parents ("local"/"other"), with optional labels.
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # One record per file tracked by the merge state; the tuple layout of
    # ms._state[f] depends on the record type (content vs path conflict).
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    # Top-level extras for files that are NOT in the merge state proper.
    fm_extras = fm.nested(b'extras')
    for f, d in sorted(ms.allextras().items()):
        if f in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for k, v in d.items():
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2574
2574
2575
2575
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    # Collect every completable name. Branches are handled separately
    # below because historically only *open* branches were listed.
    candidates = set()
    for nsname, ns in repo.names.items():
        if nsname == b'branches':
            continue
        candidates.update(ns.listnames(repo))
    for branch, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(branch)

    # With no arguments, the empty prefix matches everything.
    prefixes = args if args else [b'']
    matches = {
        name
        for prefix in prefixes
        for name in candidates
        if name.startswith(prefix)
    }
    ui.write(b'\n'.join(sorted(matches)))
    ui.write(b'\n')
2598
2598
2599
2599
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    if opts['dump_new']:
        # Serialize a fresh nodemap from the in-memory index to stdout.
        changelog = repo.unfiltered().changelog
        index = changelog.index
        if util.safehasattr(index, "nodemap_data_all"):
            # Rust/C index can produce the persistent form directly.
            raw = index.nodemap_data_all()
        else:
            raw = nodemap.persistent_data(index)
        ui.write(raw)
    elif opts['dump_disk']:
        # Dump whatever nodemap bytes are currently persisted on disk.
        changelog = repo.unfiltered().changelog
        persisted = nodemap.persisted_data(changelog)
        if persisted is not None:
            docket, raw = persisted
            ui.write(raw[:])
    elif opts['check']:
        # Validate the on-disk data against the live index.
        changelog = repo.unfiltered().changelog
        persisted = nodemap.persisted_data(changelog)
        if persisted is not None:
            docket, raw = persisted
            return nodemap.check_data(ui, changelog.index, raw)
    elif opts['metadata']:
        # Print the docket (metadata header) of the persisted nodemap.
        changelog = repo.unfiltered().changelog
        persisted = nodemap.persisted_data(changelog)
        if persisted is not None:
            docket, raw = persisted
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2661
2661
2662
2662
2663 @command(
2663 @command(
2664 b'debugobsolete',
2664 b'debugobsolete',
2665 [
2665 [
2666 (b'', b'flags', 0, _(b'markers flag')),
2666 (b'', b'flags', 0, _(b'markers flag')),
2667 (
2667 (
2668 b'',
2668 b'',
2669 b'record-parents',
2669 b'record-parents',
2670 False,
2670 False,
2671 _(b'record parent information for the precursor'),
2671 _(b'record parent information for the precursor'),
2672 ),
2672 ),
2673 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2673 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2674 (
2674 (
2675 b'',
2675 b'',
2676 b'exclusive',
2676 b'exclusive',
2677 False,
2677 False,
2678 _(b'restrict display to markers only relevant to REV'),
2678 _(b'restrict display to markers only relevant to REV'),
2679 ),
2679 ),
2680 (b'', b'index', False, _(b'display index of the marker')),
2680 (b'', b'index', False, _(b'display index of the marker')),
2681 (b'', b'delete', [], _(b'delete markers specified by indices')),
2681 (b'', b'delete', [], _(b'delete markers specified by indices')),
2682 ]
2682 ]
2683 + cmdutil.commitopts2
2683 + cmdutil.commitopts2
2684 + cmdutil.formatteropts,
2684 + cmdutil.formatteropts,
2685 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2685 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2686 )
2686 )
2687 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2687 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2688 """create arbitrary obsolete marker
2688 """create arbitrary obsolete marker
2689
2689
2690 With no arguments, displays the list of obsolescence markers."""
2690 With no arguments, displays the list of obsolescence markers."""
2691
2691
2692 opts = pycompat.byteskwargs(opts)
2692 opts = pycompat.byteskwargs(opts)
2693
2693
2694 def parsenodeid(s):
2694 def parsenodeid(s):
2695 try:
2695 try:
2696 # We do not use revsingle/revrange functions here to accept
2696 # We do not use revsingle/revrange functions here to accept
2697 # arbitrary node identifiers, possibly not present in the
2697 # arbitrary node identifiers, possibly not present in the
2698 # local repository.
2698 # local repository.
2699 n = bin(s)
2699 n = bin(s)
2700 if len(n) != repo.nodeconstants.nodelen:
2700 if len(n) != repo.nodeconstants.nodelen:
2701 raise ValueError
2701 raise ValueError
2702 return n
2702 return n
2703 except ValueError:
2703 except ValueError:
2704 raise error.InputError(
2704 raise error.InputError(
2705 b'changeset references must be full hexadecimal '
2705 b'changeset references must be full hexadecimal '
2706 b'node identifiers'
2706 b'node identifiers'
2707 )
2707 )
2708
2708
2709 if opts.get(b'delete'):
2709 if opts.get(b'delete'):
2710 indices = []
2710 indices = []
2711 for v in opts.get(b'delete'):
2711 for v in opts.get(b'delete'):
2712 try:
2712 try:
2713 indices.append(int(v))
2713 indices.append(int(v))
2714 except ValueError:
2714 except ValueError:
2715 raise error.InputError(
2715 raise error.InputError(
2716 _(b'invalid index value: %r') % v,
2716 _(b'invalid index value: %r') % v,
2717 hint=_(b'use integers for indices'),
2717 hint=_(b'use integers for indices'),
2718 )
2718 )
2719
2719
2720 if repo.currenttransaction():
2720 if repo.currenttransaction():
2721 raise error.Abort(
2721 raise error.Abort(
2722 _(b'cannot delete obsmarkers in the middle of transaction.')
2722 _(b'cannot delete obsmarkers in the middle of transaction.')
2723 )
2723 )
2724
2724
2725 with repo.lock():
2725 with repo.lock():
2726 n = repair.deleteobsmarkers(repo.obsstore, indices)
2726 n = repair.deleteobsmarkers(repo.obsstore, indices)
2727 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2727 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2728
2728
2729 return
2729 return
2730
2730
2731 if precursor is not None:
2731 if precursor is not None:
2732 if opts[b'rev']:
2732 if opts[b'rev']:
2733 raise error.InputError(
2733 raise error.InputError(
2734 b'cannot select revision when creating marker'
2734 b'cannot select revision when creating marker'
2735 )
2735 )
2736 metadata = {}
2736 metadata = {}
2737 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2737 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2738 succs = tuple(parsenodeid(succ) for succ in successors)
2738 succs = tuple(parsenodeid(succ) for succ in successors)
2739 l = repo.lock()
2739 l = repo.lock()
2740 try:
2740 try:
2741 tr = repo.transaction(b'debugobsolete')
2741 tr = repo.transaction(b'debugobsolete')
2742 try:
2742 try:
2743 date = opts.get(b'date')
2743 date = opts.get(b'date')
2744 if date:
2744 if date:
2745 date = dateutil.parsedate(date)
2745 date = dateutil.parsedate(date)
2746 else:
2746 else:
2747 date = None
2747 date = None
2748 prec = parsenodeid(precursor)
2748 prec = parsenodeid(precursor)
2749 parents = None
2749 parents = None
2750 if opts[b'record_parents']:
2750 if opts[b'record_parents']:
2751 if prec not in repo.unfiltered():
2751 if prec not in repo.unfiltered():
2752 raise error.Abort(
2752 raise error.Abort(
2753 b'cannot used --record-parents on '
2753 b'cannot used --record-parents on '
2754 b'unknown changesets'
2754 b'unknown changesets'
2755 )
2755 )
2756 parents = repo.unfiltered()[prec].parents()
2756 parents = repo.unfiltered()[prec].parents()
2757 parents = tuple(p.node() for p in parents)
2757 parents = tuple(p.node() for p in parents)
2758 repo.obsstore.create(
2758 repo.obsstore.create(
2759 tr,
2759 tr,
2760 prec,
2760 prec,
2761 succs,
2761 succs,
2762 opts[b'flags'],
2762 opts[b'flags'],
2763 parents=parents,
2763 parents=parents,
2764 date=date,
2764 date=date,
2765 metadata=metadata,
2765 metadata=metadata,
2766 ui=ui,
2766 ui=ui,
2767 )
2767 )
2768 tr.close()
2768 tr.close()
2769 except ValueError as exc:
2769 except ValueError as exc:
2770 raise error.Abort(
2770 raise error.Abort(
2771 _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
2771 _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
2772 )
2772 )
2773 finally:
2773 finally:
2774 tr.release()
2774 tr.release()
2775 finally:
2775 finally:
2776 l.release()
2776 l.release()
2777 else:
2777 else:
2778 if opts[b'rev']:
2778 if opts[b'rev']:
2779 revs = logcmdutil.revrange(repo, opts[b'rev'])
2779 revs = logcmdutil.revrange(repo, opts[b'rev'])
2780 nodes = [repo[r].node() for r in revs]
2780 nodes = [repo[r].node() for r in revs]
2781 markers = list(
2781 markers = list(
2782 obsutil.getmarkers(
2782 obsutil.getmarkers(
2783 repo, nodes=nodes, exclusive=opts[b'exclusive']
2783 repo, nodes=nodes, exclusive=opts[b'exclusive']
2784 )
2784 )
2785 )
2785 )
2786 markers.sort(key=lambda x: x._data)
2786 markers.sort(key=lambda x: x._data)
2787 else:
2787 else:
2788 markers = obsutil.getmarkers(repo)
2788 markers = obsutil.getmarkers(repo)
2789
2789
2790 markerstoiter = markers
2790 markerstoiter = markers
2791 isrelevant = lambda m: True
2791 isrelevant = lambda m: True
2792 if opts.get(b'rev') and opts.get(b'index'):
2792 if opts.get(b'rev') and opts.get(b'index'):
2793 markerstoiter = obsutil.getmarkers(repo)
2793 markerstoiter = obsutil.getmarkers(repo)
2794 markerset = set(markers)
2794 markerset = set(markers)
2795 isrelevant = lambda m: m in markerset
2795 isrelevant = lambda m: m in markerset
2796
2796
2797 fm = ui.formatter(b'debugobsolete', opts)
2797 fm = ui.formatter(b'debugobsolete', opts)
2798 for i, m in enumerate(markerstoiter):
2798 for i, m in enumerate(markerstoiter):
2799 if not isrelevant(m):
2799 if not isrelevant(m):
2800 # marker can be irrelevant when we're iterating over a set
2800 # marker can be irrelevant when we're iterating over a set
2801 # of markers (markerstoiter) which is bigger than the set
2801 # of markers (markerstoiter) which is bigger than the set
2802 # of markers we want to display (markers)
2802 # of markers we want to display (markers)
2803 # this can happen if both --index and --rev options are
2803 # this can happen if both --index and --rev options are
2804 # provided and thus we need to iterate over all of the markers
2804 # provided and thus we need to iterate over all of the markers
2805 # to get the correct indices, but only display the ones that
2805 # to get the correct indices, but only display the ones that
2806 # are relevant to --rev value
2806 # are relevant to --rev value
2807 continue
2807 continue
2808 fm.startitem()
2808 fm.startitem()
2809 ind = i if opts.get(b'index') else None
2809 ind = i if opts.get(b'index') else None
2810 cmdutil.showmarker(fm, m, index=ind)
2810 cmdutil.showmarker(fm, m, index=ind)
2811 fm.end()
2811 fm.end()
2812
2812
2813
2813
2814 @command(
2814 @command(
2815 b'debugp1copies',
2815 b'debugp1copies',
2816 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2816 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2817 _(b'[-r REV]'),
2817 _(b'[-r REV]'),
2818 )
2818 )
2819 def debugp1copies(ui, repo, **opts):
2819 def debugp1copies(ui, repo, **opts):
2820 """dump copy information compared to p1"""
2820 """dump copy information compared to p1"""
2821
2821
2822 opts = pycompat.byteskwargs(opts)
2822 opts = pycompat.byteskwargs(opts)
2823 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2823 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2824 for dst, src in ctx.p1copies().items():
2824 for dst, src in ctx.p1copies().items():
2825 ui.write(b'%s -> %s\n' % (src, dst))
2825 ui.write(b'%s -> %s\n' % (src, dst))
2826
2826
2827
2827
2828 @command(
2828 @command(
2829 b'debugp2copies',
2829 b'debugp2copies',
2830 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2830 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2831 _(b'[-r REV]'),
2831 _(b'[-r REV]'),
2832 )
2832 )
2833 def debugp2copies(ui, repo, **opts):
2833 def debugp2copies(ui, repo, **opts):
2834 """dump copy information compared to p2"""
2834 """dump copy information compared to p2"""
2835
2835
2836 opts = pycompat.byteskwargs(opts)
2836 opts = pycompat.byteskwargs(opts)
2837 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2837 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2838 for dst, src in ctx.p2copies().items():
2838 for dst, src in ctx.p2copies().items():
2839 ui.write(b'%s -> %s\n' % (src, dst))
2839 ui.write(b'%s -> %s\n' % (src, dst))
2840
2840
2841
2841
2842 @command(
2842 @command(
2843 b'debugpathcomplete',
2843 b'debugpathcomplete',
2844 [
2844 [
2845 (b'f', b'full', None, _(b'complete an entire path')),
2845 (b'f', b'full', None, _(b'complete an entire path')),
2846 (b'n', b'normal', None, _(b'show only normal files')),
2846 (b'n', b'normal', None, _(b'show only normal files')),
2847 (b'a', b'added', None, _(b'show only added files')),
2847 (b'a', b'added', None, _(b'show only added files')),
2848 (b'r', b'removed', None, _(b'show only removed files')),
2848 (b'r', b'removed', None, _(b'show only removed files')),
2849 ],
2849 ],
2850 _(b'FILESPEC...'),
2850 _(b'FILESPEC...'),
2851 )
2851 )
2852 def debugpathcomplete(ui, repo, *specs, **opts):
2852 def debugpathcomplete(ui, repo, *specs, **opts):
2853 """complete part or all of a tracked path
2853 """complete part or all of a tracked path
2854
2854
2855 This command supports shells that offer path name completion. It
2855 This command supports shells that offer path name completion. It
2856 currently completes only files already known to the dirstate.
2856 currently completes only files already known to the dirstate.
2857
2857
2858 Completion extends only to the next path segment unless
2858 Completion extends only to the next path segment unless
2859 --full is specified, in which case entire paths are used."""
2859 --full is specified, in which case entire paths are used."""
2860
2860
2861 def complete(path, acceptable):
2861 def complete(path, acceptable):
2862 dirstate = repo.dirstate
2862 dirstate = repo.dirstate
2863 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2863 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2864 rootdir = repo.root + pycompat.ossep
2864 rootdir = repo.root + pycompat.ossep
2865 if spec != repo.root and not spec.startswith(rootdir):
2865 if spec != repo.root and not spec.startswith(rootdir):
2866 return [], []
2866 return [], []
2867 if os.path.isdir(spec):
2867 if os.path.isdir(spec):
2868 spec += b'/'
2868 spec += b'/'
2869 spec = spec[len(rootdir) :]
2869 spec = spec[len(rootdir) :]
2870 fixpaths = pycompat.ossep != b'/'
2870 fixpaths = pycompat.ossep != b'/'
2871 if fixpaths:
2871 if fixpaths:
2872 spec = spec.replace(pycompat.ossep, b'/')
2872 spec = spec.replace(pycompat.ossep, b'/')
2873 speclen = len(spec)
2873 speclen = len(spec)
2874 fullpaths = opts['full']
2874 fullpaths = opts['full']
2875 files, dirs = set(), set()
2875 files, dirs = set(), set()
2876 adddir, addfile = dirs.add, files.add
2876 adddir, addfile = dirs.add, files.add
2877 for f, st in dirstate.items():
2877 for f, st in dirstate.items():
2878 if f.startswith(spec) and st.state in acceptable:
2878 if f.startswith(spec) and st.state in acceptable:
2879 if fixpaths:
2879 if fixpaths:
2880 f = f.replace(b'/', pycompat.ossep)
2880 f = f.replace(b'/', pycompat.ossep)
2881 if fullpaths:
2881 if fullpaths:
2882 addfile(f)
2882 addfile(f)
2883 continue
2883 continue
2884 s = f.find(pycompat.ossep, speclen)
2884 s = f.find(pycompat.ossep, speclen)
2885 if s >= 0:
2885 if s >= 0:
2886 adddir(f[:s])
2886 adddir(f[:s])
2887 else:
2887 else:
2888 addfile(f)
2888 addfile(f)
2889 return files, dirs
2889 return files, dirs
2890
2890
2891 acceptable = b''
2891 acceptable = b''
2892 if opts['normal']:
2892 if opts['normal']:
2893 acceptable += b'nm'
2893 acceptable += b'nm'
2894 if opts['added']:
2894 if opts['added']:
2895 acceptable += b'a'
2895 acceptable += b'a'
2896 if opts['removed']:
2896 if opts['removed']:
2897 acceptable += b'r'
2897 acceptable += b'r'
2898 cwd = repo.getcwd()
2898 cwd = repo.getcwd()
2899 if not specs:
2899 if not specs:
2900 specs = [b'.']
2900 specs = [b'.']
2901
2901
2902 files, dirs = set(), set()
2902 files, dirs = set(), set()
2903 for spec in specs:
2903 for spec in specs:
2904 f, d = complete(spec, acceptable or b'nmar')
2904 f, d = complete(spec, acceptable or b'nmar')
2905 files.update(f)
2905 files.update(f)
2906 dirs.update(d)
2906 dirs.update(d)
2907 files.update(dirs)
2907 files.update(dirs)
2908 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2908 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2909 ui.write(b'\n')
2909 ui.write(b'\n')
2910
2910
2911
2911
2912 @command(
2912 @command(
2913 b'debugpathcopies',
2913 b'debugpathcopies',
2914 cmdutil.walkopts,
2914 cmdutil.walkopts,
2915 b'hg debugpathcopies REV1 REV2 [FILE]',
2915 b'hg debugpathcopies REV1 REV2 [FILE]',
2916 inferrepo=True,
2916 inferrepo=True,
2917 )
2917 )
2918 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
2918 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
2919 """show copies between two revisions"""
2919 """show copies between two revisions"""
2920 ctx1 = scmutil.revsingle(repo, rev1)
2920 ctx1 = scmutil.revsingle(repo, rev1)
2921 ctx2 = scmutil.revsingle(repo, rev2)
2921 ctx2 = scmutil.revsingle(repo, rev2)
2922 m = scmutil.match(ctx1, pats, opts)
2922 m = scmutil.match(ctx1, pats, opts)
2923 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
2923 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
2924 ui.write(b'%s -> %s\n' % (src, dst))
2924 ui.write(b'%s -> %s\n' % (src, dst))
2925
2925
2926
2926
2927 @command(b'debugpeer', [], _(b'PATH'), norepo=True)
2927 @command(b'debugpeer', [], _(b'PATH'), norepo=True)
2928 def debugpeer(ui, path):
2928 def debugpeer(ui, path):
2929 """establish a connection to a peer repository"""
2929 """establish a connection to a peer repository"""
2930 # Always enable peer request logging. Requires --debug to display
2930 # Always enable peer request logging. Requires --debug to display
2931 # though.
2931 # though.
2932 overrides = {
2932 overrides = {
2933 (b'devel', b'debug.peer-request'): True,
2933 (b'devel', b'debug.peer-request'): True,
2934 }
2934 }
2935
2935
2936 with ui.configoverride(overrides):
2936 with ui.configoverride(overrides):
2937 peer = hg.peer(ui, {}, path)
2937 peer = hg.peer(ui, {}, path)
2938
2938
2939 try:
2939 try:
2940 local = peer.local() is not None
2940 local = peer.local() is not None
2941 canpush = peer.canpush()
2941 canpush = peer.canpush()
2942
2942
2943 ui.write(_(b'url: %s\n') % peer.url())
2943 ui.write(_(b'url: %s\n') % peer.url())
2944 ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
2944 ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
2945 ui.write(
2945 ui.write(
2946 _(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no'))
2946 _(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no'))
2947 )
2947 )
2948 finally:
2948 finally:
2949 peer.close()
2949 peer.close()
2950
2950
2951
2951
2952 @command(
2952 @command(
2953 b'debugpickmergetool',
2953 b'debugpickmergetool',
2954 [
2954 [
2955 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2955 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2956 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2956 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2957 ]
2957 ]
2958 + cmdutil.walkopts
2958 + cmdutil.walkopts
2959 + cmdutil.mergetoolopts,
2959 + cmdutil.mergetoolopts,
2960 _(b'[PATTERN]...'),
2960 _(b'[PATTERN]...'),
2961 inferrepo=True,
2961 inferrepo=True,
2962 )
2962 )
2963 def debugpickmergetool(ui, repo, *pats, **opts):
2963 def debugpickmergetool(ui, repo, *pats, **opts):
2964 """examine which merge tool is chosen for specified file
2964 """examine which merge tool is chosen for specified file
2965
2965
2966 As described in :hg:`help merge-tools`, Mercurial examines
2966 As described in :hg:`help merge-tools`, Mercurial examines
2967 configurations below in this order to decide which merge tool is
2967 configurations below in this order to decide which merge tool is
2968 chosen for specified file.
2968 chosen for specified file.
2969
2969
2970 1. ``--tool`` option
2970 1. ``--tool`` option
2971 2. ``HGMERGE`` environment variable
2971 2. ``HGMERGE`` environment variable
2972 3. configurations in ``merge-patterns`` section
2972 3. configurations in ``merge-patterns`` section
2973 4. configuration of ``ui.merge``
2973 4. configuration of ``ui.merge``
2974 5. configurations in ``merge-tools`` section
2974 5. configurations in ``merge-tools`` section
2975 6. ``hgmerge`` tool (for historical reason only)
2975 6. ``hgmerge`` tool (for historical reason only)
2976 7. default tool for fallback (``:merge`` or ``:prompt``)
2976 7. default tool for fallback (``:merge`` or ``:prompt``)
2977
2977
2978 This command writes out examination result in the style below::
2978 This command writes out examination result in the style below::
2979
2979
2980 FILE = MERGETOOL
2980 FILE = MERGETOOL
2981
2981
2982 By default, all files known in the first parent context of the
2982 By default, all files known in the first parent context of the
2983 working directory are examined. Use file patterns and/or -I/-X
2983 working directory are examined. Use file patterns and/or -I/-X
2984 options to limit target files. -r/--rev is also useful to examine
2984 options to limit target files. -r/--rev is also useful to examine
2985 files in another context without actual updating to it.
2985 files in another context without actual updating to it.
2986
2986
2987 With --debug, this command shows warning messages while matching
2987 With --debug, this command shows warning messages while matching
2988 against ``merge-patterns`` and so on, too. It is recommended to
2988 against ``merge-patterns`` and so on, too. It is recommended to
2989 use this option with explicit file patterns and/or -I/-X options,
2989 use this option with explicit file patterns and/or -I/-X options,
2990 because this option increases amount of output per file according
2990 because this option increases amount of output per file according
2991 to configurations in hgrc.
2991 to configurations in hgrc.
2992
2992
2993 With -v/--verbose, this command shows configurations below at
2993 With -v/--verbose, this command shows configurations below at
2994 first (only if specified).
2994 first (only if specified).
2995
2995
2996 - ``--tool`` option
2996 - ``--tool`` option
2997 - ``HGMERGE`` environment variable
2997 - ``HGMERGE`` environment variable
2998 - configuration of ``ui.merge``
2998 - configuration of ``ui.merge``
2999
2999
3000 If merge tool is chosen before matching against
3000 If merge tool is chosen before matching against
3001 ``merge-patterns``, this command can't show any helpful
3001 ``merge-patterns``, this command can't show any helpful
3002 information, even with --debug. In such case, information above is
3002 information, even with --debug. In such case, information above is
3003 useful to know why a merge tool is chosen.
3003 useful to know why a merge tool is chosen.
3004 """
3004 """
3005 opts = pycompat.byteskwargs(opts)
3005 opts = pycompat.byteskwargs(opts)
3006 overrides = {}
3006 overrides = {}
3007 if opts[b'tool']:
3007 if opts[b'tool']:
3008 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
3008 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
3009 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
3009 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
3010
3010
3011 with ui.configoverride(overrides, b'debugmergepatterns'):
3011 with ui.configoverride(overrides, b'debugmergepatterns'):
3012 hgmerge = encoding.environ.get(b"HGMERGE")
3012 hgmerge = encoding.environ.get(b"HGMERGE")
3013 if hgmerge is not None:
3013 if hgmerge is not None:
3014 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
3014 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
3015 uimerge = ui.config(b"ui", b"merge")
3015 uimerge = ui.config(b"ui", b"merge")
3016 if uimerge:
3016 if uimerge:
3017 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
3017 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
3018
3018
3019 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
3019 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
3020 m = scmutil.match(ctx, pats, opts)
3020 m = scmutil.match(ctx, pats, opts)
3021 changedelete = opts[b'changedelete']
3021 changedelete = opts[b'changedelete']
3022 for path in ctx.walk(m):
3022 for path in ctx.walk(m):
3023 fctx = ctx[path]
3023 fctx = ctx[path]
3024 with ui.silent(
3024 with ui.silent(
3025 error=True
3025 error=True
3026 ) if not ui.debugflag else util.nullcontextmanager():
3026 ) if not ui.debugflag else util.nullcontextmanager():
3027 tool, toolpath = filemerge._picktool(
3027 tool, toolpath = filemerge._picktool(
3028 repo,
3028 repo,
3029 ui,
3029 ui,
3030 path,
3030 path,
3031 fctx.isbinary(),
3031 fctx.isbinary(),
3032 b'l' in fctx.flags(),
3032 b'l' in fctx.flags(),
3033 changedelete,
3033 changedelete,
3034 )
3034 )
3035 ui.write(b'%s = %s\n' % (path, tool))
3035 ui.write(b'%s = %s\n' % (path, tool))
3036
3036
3037
3037
3038 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
3038 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
3039 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
3039 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
3040 """access the pushkey key/value protocol
3040 """access the pushkey key/value protocol
3041
3041
3042 With two args, list the keys in the given namespace.
3042 With two args, list the keys in the given namespace.
3043
3043
3044 With five args, set a key to new if it currently is set to old.
3044 With five args, set a key to new if it currently is set to old.
3045 Reports success or failure.
3045 Reports success or failure.
3046 """
3046 """
3047
3047
3048 target = hg.peer(ui, {}, repopath)
3048 target = hg.peer(ui, {}, repopath)
3049 try:
3049 try:
3050 if keyinfo:
3050 if keyinfo:
3051 key, old, new = keyinfo
3051 key, old, new = keyinfo
3052 with target.commandexecutor() as e:
3052 with target.commandexecutor() as e:
3053 r = e.callcommand(
3053 r = e.callcommand(
3054 b'pushkey',
3054 b'pushkey',
3055 {
3055 {
3056 b'namespace': namespace,
3056 b'namespace': namespace,
3057 b'key': key,
3057 b'key': key,
3058 b'old': old,
3058 b'old': old,
3059 b'new': new,
3059 b'new': new,
3060 },
3060 },
3061 ).result()
3061 ).result()
3062
3062
3063 ui.status(pycompat.bytestr(r) + b'\n')
3063 ui.status(pycompat.bytestr(r) + b'\n')
3064 return not r
3064 return not r
3065 else:
3065 else:
3066 for k, v in sorted(target.listkeys(namespace).items()):
3066 for k, v in sorted(target.listkeys(namespace).items()):
3067 ui.write(
3067 ui.write(
3068 b"%s\t%s\n"
3068 b"%s\t%s\n"
3069 % (stringutil.escapestr(k), stringutil.escapestr(v))
3069 % (stringutil.escapestr(k), stringutil.escapestr(v))
3070 )
3070 )
3071 finally:
3071 finally:
3072 target.close()
3072 target.close()
3073
3073
3074
3074
3075 @command(b'debugpvec', [], _(b'A B'))
3075 @command(b'debugpvec', [], _(b'A B'))
3076 def debugpvec(ui, repo, a, b=None):
3076 def debugpvec(ui, repo, a, b=None):
3077 ca = scmutil.revsingle(repo, a)
3077 ca = scmutil.revsingle(repo, a)
3078 cb = scmutil.revsingle(repo, b)
3078 cb = scmutil.revsingle(repo, b)
3079 pa = pvec.ctxpvec(ca)
3079 pa = pvec.ctxpvec(ca)
3080 pb = pvec.ctxpvec(cb)
3080 pb = pvec.ctxpvec(cb)
3081 if pa == pb:
3081 if pa == pb:
3082 rel = b"="
3082 rel = b"="
3083 elif pa > pb:
3083 elif pa > pb:
3084 rel = b">"
3084 rel = b">"
3085 elif pa < pb:
3085 elif pa < pb:
3086 rel = b"<"
3086 rel = b"<"
3087 elif pa | pb:
3087 elif pa | pb:
3088 rel = b"|"
3088 rel = b"|"
3089 ui.write(_(b"a: %s\n") % pa)
3089 ui.write(_(b"a: %s\n") % pa)
3090 ui.write(_(b"b: %s\n") % pb)
3090 ui.write(_(b"b: %s\n") % pb)
3091 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
3091 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
3092 ui.write(
3092 ui.write(
3093 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
3093 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
3094 % (
3094 % (
3095 abs(pa._depth - pb._depth),
3095 abs(pa._depth - pb._depth),
3096 pvec._hamming(pa._vec, pb._vec),
3096 pvec._hamming(pa._vec, pb._vec),
3097 pa.distance(pb),
3097 pa.distance(pb),
3098 rel,
3098 rel,
3099 )
3099 )
3100 )
3100 )
3101
3101
3102
3102
3103 @command(
3103 @command(
3104 b'debugrebuilddirstate|debugrebuildstate',
3104 b'debugrebuilddirstate|debugrebuildstate',
3105 [
3105 [
3106 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
3106 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
3107 (
3107 (
3108 b'',
3108 b'',
3109 b'minimal',
3109 b'minimal',
3110 None,
3110 None,
3111 _(
3111 _(
3112 b'only rebuild files that are inconsistent with '
3112 b'only rebuild files that are inconsistent with '
3113 b'the working copy parent'
3113 b'the working copy parent'
3114 ),
3114 ),
3115 ),
3115 ),
3116 ],
3116 ],
3117 _(b'[-r REV]'),
3117 _(b'[-r REV]'),
3118 )
3118 )
3119 def debugrebuilddirstate(ui, repo, rev, **opts):
3119 def debugrebuilddirstate(ui, repo, rev, **opts):
3120 """rebuild the dirstate as it would look like for the given revision
3120 """rebuild the dirstate as it would look like for the given revision
3121
3121
3122 If no revision is specified the first current parent will be used.
3122 If no revision is specified the first current parent will be used.
3123
3123
3124 The dirstate will be set to the files of the given revision.
3124 The dirstate will be set to the files of the given revision.
3125 The actual working directory content or existing dirstate
3125 The actual working directory content or existing dirstate
3126 information such as adds or removes is not considered.
3126 information such as adds or removes is not considered.
3127
3127
3128 ``minimal`` will only rebuild the dirstate status for files that claim to be
3128 ``minimal`` will only rebuild the dirstate status for files that claim to be
3129 tracked but are not in the parent manifest, or that exist in the parent
3129 tracked but are not in the parent manifest, or that exist in the parent
3130 manifest but are not in the dirstate. It will not change adds, removes, or
3130 manifest but are not in the dirstate. It will not change adds, removes, or
3131 modified files that are in the working copy parent.
3131 modified files that are in the working copy parent.
3132
3132
3133 One use of this command is to make the next :hg:`status` invocation
3133 One use of this command is to make the next :hg:`status` invocation
3134 check the actual file content.
3134 check the actual file content.
3135 """
3135 """
3136 ctx = scmutil.revsingle(repo, rev)
3136 ctx = scmutil.revsingle(repo, rev)
3137 with repo.wlock():
3137 with repo.wlock():
3138 dirstate = repo.dirstate
3138 dirstate = repo.dirstate
3139 changedfiles = None
3139 changedfiles = None
3140 # See command doc for what minimal does.
3140 # See command doc for what minimal does.
3141 if opts.get('minimal'):
3141 if opts.get('minimal'):
3142 manifestfiles = set(ctx.manifest().keys())
3142 manifestfiles = set(ctx.manifest().keys())
3143 dirstatefiles = set(dirstate)
3143 dirstatefiles = set(dirstate)
3144 manifestonly = manifestfiles - dirstatefiles
3144 manifestonly = manifestfiles - dirstatefiles
3145 dsonly = dirstatefiles - manifestfiles
3145 dsonly = dirstatefiles - manifestfiles
3146 dsnotadded = {f for f in dsonly if not dirstate.get_entry(f).added}
3146 dsnotadded = {f for f in dsonly if not dirstate.get_entry(f).added}
3147 changedfiles = manifestonly | dsnotadded
3147 changedfiles = manifestonly | dsnotadded
3148
3148
3149 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3149 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3150
3150
3151
3151
3152 @command(
3152 @command(
3153 b'debugrebuildfncache',
3153 b'debugrebuildfncache',
3154 [
3154 [
3155 (
3155 (
3156 b'',
3156 b'',
3157 b'only-data',
3157 b'only-data',
3158 False,
3158 False,
3159 _(b'only look for wrong .d files (much faster)'),
3159 _(b'only look for wrong .d files (much faster)'),
3160 )
3160 )
3161 ],
3161 ],
3162 b'',
3162 b'',
3163 )
3163 )
3164 def debugrebuildfncache(ui, repo, **opts):
3164 def debugrebuildfncache(ui, repo, **opts):
3165 """rebuild the fncache file"""
3165 """rebuild the fncache file"""
3166 opts = pycompat.byteskwargs(opts)
3166 opts = pycompat.byteskwargs(opts)
3167 repair.rebuildfncache(ui, repo, opts.get(b"only_data"))
3167 repair.rebuildfncache(ui, repo, opts.get(b"only_data"))
3168
3168
3169
3169
3170 @command(
3170 @command(
3171 b'debugrename',
3171 b'debugrename',
3172 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
3172 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
3173 _(b'[-r REV] [FILE]...'),
3173 _(b'[-r REV] [FILE]...'),
3174 )
3174 )
3175 def debugrename(ui, repo, *pats, **opts):
3175 def debugrename(ui, repo, *pats, **opts):
3176 """dump rename information"""
3176 """dump rename information"""
3177
3177
3178 opts = pycompat.byteskwargs(opts)
3178 opts = pycompat.byteskwargs(opts)
3179 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
3179 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
3180 m = scmutil.match(ctx, pats, opts)
3180 m = scmutil.match(ctx, pats, opts)
3181 for abs in ctx.walk(m):
3181 for abs in ctx.walk(m):
3182 fctx = ctx[abs]
3182 fctx = ctx[abs]
3183 o = fctx.filelog().renamed(fctx.filenode())
3183 o = fctx.filelog().renamed(fctx.filenode())
3184 rel = repo.pathto(abs)
3184 rel = repo.pathto(abs)
3185 if o:
3185 if o:
3186 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
3186 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
3187 else:
3187 else:
3188 ui.write(_(b"%s not renamed\n") % rel)
3188 ui.write(_(b"%s not renamed\n") % rel)
3189
3189
3190
3190
3191 @command(b'debugrequires|debugrequirements', [], b'')
3191 @command(b'debugrequires|debugrequirements', [], b'')
3192 def debugrequirements(ui, repo):
3192 def debugrequirements(ui, repo):
3193 """print the current repo requirements"""
3193 """print the current repo requirements"""
3194 for r in sorted(repo.requirements):
3194 for r in sorted(repo.requirements):
3195 ui.write(b"%s\n" % r)
3195 ui.write(b"%s\n" % r)
3196
3196
3197
3197
3198 @command(
3198 @command(
3199 b'debugrevlog',
3199 b'debugrevlog',
3200 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
3200 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
3201 _(b'-c|-m|FILE'),
3201 _(b'-c|-m|FILE'),
3202 optionalrepo=True,
3202 optionalrepo=True,
3203 )
3203 )
3204 def debugrevlog(ui, repo, file_=None, **opts):
3204 def debugrevlog(ui, repo, file_=None, **opts):
3205 """show data and statistics about a revlog"""
3205 """show data and statistics about a revlog"""
3206 opts = pycompat.byteskwargs(opts)
3206 opts = pycompat.byteskwargs(opts)
3207 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
3207 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
3208
3208
3209 if opts.get(b"dump"):
3209 if opts.get(b"dump"):
3210 numrevs = len(r)
3210 numrevs = len(r)
3211 ui.write(
3211 ui.write(
3212 (
3212 (
3213 b"# rev p1rev p2rev start end deltastart base p1 p2"
3213 b"# rev p1rev p2rev start end deltastart base p1 p2"
3214 b" rawsize totalsize compression heads chainlen\n"
3214 b" rawsize totalsize compression heads chainlen\n"
3215 )
3215 )
3216 )
3216 )
3217 ts = 0
3217 ts = 0
3218 heads = set()
3218 heads = set()
3219
3219
3220 for rev in range(numrevs):
3220 for rev in range(numrevs):
3221 dbase = r.deltaparent(rev)
3221 dbase = r.deltaparent(rev)
3222 if dbase == -1:
3222 if dbase == -1:
3223 dbase = rev
3223 dbase = rev
3224 cbase = r.chainbase(rev)
3224 cbase = r.chainbase(rev)
3225 clen = r.chainlen(rev)
3225 clen = r.chainlen(rev)
3226 p1, p2 = r.parentrevs(rev)
3226 p1, p2 = r.parentrevs(rev)
3227 rs = r.rawsize(rev)
3227 rs = r.rawsize(rev)
3228 ts = ts + rs
3228 ts = ts + rs
3229 heads -= set(r.parentrevs(rev))
3229 heads -= set(r.parentrevs(rev))
3230 heads.add(rev)
3230 heads.add(rev)
3231 try:
3231 try:
3232 compression = ts / r.end(rev)
3232 compression = ts / r.end(rev)
3233 except ZeroDivisionError:
3233 except ZeroDivisionError:
3234 compression = 0
3234 compression = 0
3235 ui.write(
3235 ui.write(
3236 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
3236 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
3237 b"%11d %5d %8d\n"
3237 b"%11d %5d %8d\n"
3238 % (
3238 % (
3239 rev,
3239 rev,
3240 p1,
3240 p1,
3241 p2,
3241 p2,
3242 r.start(rev),
3242 r.start(rev),
3243 r.end(rev),
3243 r.end(rev),
3244 r.start(dbase),
3244 r.start(dbase),
3245 r.start(cbase),
3245 r.start(cbase),
3246 r.start(p1),
3246 r.start(p1),
3247 r.start(p2),
3247 r.start(p2),
3248 rs,
3248 rs,
3249 ts,
3249 ts,
3250 compression,
3250 compression,
3251 len(heads),
3251 len(heads),
3252 clen,
3252 clen,
3253 )
3253 )
3254 )
3254 )
3255 return 0
3255 return 0
3256
3256
3257 format = r._format_version
3257 format = r._format_version
3258 v = r._format_flags
3258 v = r._format_flags
3259 flags = []
3259 flags = []
3260 gdelta = False
3260 gdelta = False
3261 if v & revlog.FLAG_INLINE_DATA:
3261 if v & revlog.FLAG_INLINE_DATA:
3262 flags.append(b'inline')
3262 flags.append(b'inline')
3263 if v & revlog.FLAG_GENERALDELTA:
3263 if v & revlog.FLAG_GENERALDELTA:
3264 gdelta = True
3264 gdelta = True
3265 flags.append(b'generaldelta')
3265 flags.append(b'generaldelta')
3266 if not flags:
3266 if not flags:
3267 flags = [b'(none)']
3267 flags = [b'(none)']
3268
3268
3269 ### tracks merge vs single parent
3269 ### tracks merge vs single parent
3270 nummerges = 0
3270 nummerges = 0
3271
3271
3272 ### tracks ways the "delta" are build
3272 ### tracks ways the "delta" are build
3273 # nodelta
3273 # nodelta
3274 numempty = 0
3274 numempty = 0
3275 numemptytext = 0
3275 numemptytext = 0
3276 numemptydelta = 0
3276 numemptydelta = 0
3277 # full file content
3277 # full file content
3278 numfull = 0
3278 numfull = 0
3279 # intermediate snapshot against a prior snapshot
3279 # intermediate snapshot against a prior snapshot
3280 numsemi = 0
3280 numsemi = 0
3281 # snapshot count per depth
3281 # snapshot count per depth
3282 numsnapdepth = collections.defaultdict(lambda: 0)
3282 numsnapdepth = collections.defaultdict(lambda: 0)
3283 # delta against previous revision
3283 # delta against previous revision
3284 numprev = 0
3284 numprev = 0
3285 # delta against first or second parent (not prev)
3285 # delta against first or second parent (not prev)
3286 nump1 = 0
3286 nump1 = 0
3287 nump2 = 0
3287 nump2 = 0
3288 # delta against neither prev nor parents
3288 # delta against neither prev nor parents
3289 numother = 0
3289 numother = 0
3290 # delta against prev that are also first or second parent
3290 # delta against prev that are also first or second parent
3291 # (details of `numprev`)
3291 # (details of `numprev`)
3292 nump1prev = 0
3292 nump1prev = 0
3293 nump2prev = 0
3293 nump2prev = 0
3294
3294
3295 # data about delta chain of each revs
3295 # data about delta chain of each revs
3296 chainlengths = []
3296 chainlengths = []
3297 chainbases = []
3297 chainbases = []
3298 chainspans = []
3298 chainspans = []
3299
3299
3300 # data about each revision
3300 # data about each revision
3301 datasize = [None, 0, 0]
3301 datasize = [None, 0, 0]
3302 fullsize = [None, 0, 0]
3302 fullsize = [None, 0, 0]
3303 semisize = [None, 0, 0]
3303 semisize = [None, 0, 0]
3304 # snapshot count per depth
3304 # snapshot count per depth
3305 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
3305 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
3306 deltasize = [None, 0, 0]
3306 deltasize = [None, 0, 0]
3307 chunktypecounts = {}
3307 chunktypecounts = {}
3308 chunktypesizes = {}
3308 chunktypesizes = {}
3309
3309
3310 def addsize(size, l):
3310 def addsize(size, l):
3311 if l[0] is None or size < l[0]:
3311 if l[0] is None or size < l[0]:
3312 l[0] = size
3312 l[0] = size
3313 if size > l[1]:
3313 if size > l[1]:
3314 l[1] = size
3314 l[1] = size
3315 l[2] += size
3315 l[2] += size
3316
3316
3317 numrevs = len(r)
3317 numrevs = len(r)
3318 for rev in range(numrevs):
3318 for rev in range(numrevs):
3319 p1, p2 = r.parentrevs(rev)
3319 p1, p2 = r.parentrevs(rev)
3320 delta = r.deltaparent(rev)
3320 delta = r.deltaparent(rev)
3321 if format > 0:
3321 if format > 0:
3322 addsize(r.rawsize(rev), datasize)
3322 addsize(r.rawsize(rev), datasize)
3323 if p2 != nullrev:
3323 if p2 != nullrev:
3324 nummerges += 1
3324 nummerges += 1
3325 size = r.length(rev)
3325 size = r.length(rev)
3326 if delta == nullrev:
3326 if delta == nullrev:
3327 chainlengths.append(0)
3327 chainlengths.append(0)
3328 chainbases.append(r.start(rev))
3328 chainbases.append(r.start(rev))
3329 chainspans.append(size)
3329 chainspans.append(size)
3330 if size == 0:
3330 if size == 0:
3331 numempty += 1
3331 numempty += 1
3332 numemptytext += 1
3332 numemptytext += 1
3333 else:
3333 else:
3334 numfull += 1
3334 numfull += 1
3335 numsnapdepth[0] += 1
3335 numsnapdepth[0] += 1
3336 addsize(size, fullsize)
3336 addsize(size, fullsize)
3337 addsize(size, snapsizedepth[0])
3337 addsize(size, snapsizedepth[0])
3338 else:
3338 else:
3339 chainlengths.append(chainlengths[delta] + 1)
3339 chainlengths.append(chainlengths[delta] + 1)
3340 baseaddr = chainbases[delta]
3340 baseaddr = chainbases[delta]
3341 revaddr = r.start(rev)
3341 revaddr = r.start(rev)
3342 chainbases.append(baseaddr)
3342 chainbases.append(baseaddr)
3343 chainspans.append((revaddr - baseaddr) + size)
3343 chainspans.append((revaddr - baseaddr) + size)
3344 if size == 0:
3344 if size == 0:
3345 numempty += 1
3345 numempty += 1
3346 numemptydelta += 1
3346 numemptydelta += 1
3347 elif r.issnapshot(rev):
3347 elif r.issnapshot(rev):
3348 addsize(size, semisize)
3348 addsize(size, semisize)
3349 numsemi += 1
3349 numsemi += 1
3350 depth = r.snapshotdepth(rev)
3350 depth = r.snapshotdepth(rev)
3351 numsnapdepth[depth] += 1
3351 numsnapdepth[depth] += 1
3352 addsize(size, snapsizedepth[depth])
3352 addsize(size, snapsizedepth[depth])
3353 else:
3353 else:
3354 addsize(size, deltasize)
3354 addsize(size, deltasize)
3355 if delta == rev - 1:
3355 if delta == rev - 1:
3356 numprev += 1
3356 numprev += 1
3357 if delta == p1:
3357 if delta == p1:
3358 nump1prev += 1
3358 nump1prev += 1
3359 elif delta == p2:
3359 elif delta == p2:
3360 nump2prev += 1
3360 nump2prev += 1
3361 elif delta == p1:
3361 elif delta == p1:
3362 nump1 += 1
3362 nump1 += 1
3363 elif delta == p2:
3363 elif delta == p2:
3364 nump2 += 1
3364 nump2 += 1
3365 elif delta != nullrev:
3365 elif delta != nullrev:
3366 numother += 1
3366 numother += 1
3367
3367
3368 # Obtain data on the raw chunks in the revlog.
3368 # Obtain data on the raw chunks in the revlog.
3369 if util.safehasattr(r, b'_getsegmentforrevs'):
3369 if util.safehasattr(r, b'_getsegmentforrevs'):
3370 segment = r._getsegmentforrevs(rev, rev)[1]
3370 segment = r._getsegmentforrevs(rev, rev)[1]
3371 else:
3371 else:
3372 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3372 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3373 if segment:
3373 if segment:
3374 chunktype = bytes(segment[0:1])
3374 chunktype = bytes(segment[0:1])
3375 else:
3375 else:
3376 chunktype = b'empty'
3376 chunktype = b'empty'
3377
3377
3378 if chunktype not in chunktypecounts:
3378 if chunktype not in chunktypecounts:
3379 chunktypecounts[chunktype] = 0
3379 chunktypecounts[chunktype] = 0
3380 chunktypesizes[chunktype] = 0
3380 chunktypesizes[chunktype] = 0
3381
3381
3382 chunktypecounts[chunktype] += 1
3382 chunktypecounts[chunktype] += 1
3383 chunktypesizes[chunktype] += size
3383 chunktypesizes[chunktype] += size
3384
3384
3385 # Adjust size min value for empty cases
3385 # Adjust size min value for empty cases
3386 for size in (datasize, fullsize, semisize, deltasize):
3386 for size in (datasize, fullsize, semisize, deltasize):
3387 if size[0] is None:
3387 if size[0] is None:
3388 size[0] = 0
3388 size[0] = 0
3389
3389
3390 numdeltas = numrevs - numfull - numempty - numsemi
3390 numdeltas = numrevs - numfull - numempty - numsemi
3391 numoprev = numprev - nump1prev - nump2prev
3391 numoprev = numprev - nump1prev - nump2prev
3392 totalrawsize = datasize[2]
3392 totalrawsize = datasize[2]
3393 datasize[2] /= numrevs
3393 datasize[2] /= numrevs
3394 fulltotal = fullsize[2]
3394 fulltotal = fullsize[2]
3395 if numfull == 0:
3395 if numfull == 0:
3396 fullsize[2] = 0
3396 fullsize[2] = 0
3397 else:
3397 else:
3398 fullsize[2] /= numfull
3398 fullsize[2] /= numfull
3399 semitotal = semisize[2]
3399 semitotal = semisize[2]
3400 snaptotal = {}
3400 snaptotal = {}
3401 if numsemi > 0:
3401 if numsemi > 0:
3402 semisize[2] /= numsemi
3402 semisize[2] /= numsemi
3403 for depth in snapsizedepth:
3403 for depth in snapsizedepth:
3404 snaptotal[depth] = snapsizedepth[depth][2]
3404 snaptotal[depth] = snapsizedepth[depth][2]
3405 snapsizedepth[depth][2] /= numsnapdepth[depth]
3405 snapsizedepth[depth][2] /= numsnapdepth[depth]
3406
3406
3407 deltatotal = deltasize[2]
3407 deltatotal = deltasize[2]
3408 if numdeltas > 0:
3408 if numdeltas > 0:
3409 deltasize[2] /= numdeltas
3409 deltasize[2] /= numdeltas
3410 totalsize = fulltotal + semitotal + deltatotal
3410 totalsize = fulltotal + semitotal + deltatotal
3411 avgchainlen = sum(chainlengths) / numrevs
3411 avgchainlen = sum(chainlengths) / numrevs
3412 maxchainlen = max(chainlengths)
3412 maxchainlen = max(chainlengths)
3413 maxchainspan = max(chainspans)
3413 maxchainspan = max(chainspans)
3414 compratio = 1
3414 compratio = 1
3415 if totalsize:
3415 if totalsize:
3416 compratio = totalrawsize / totalsize
3416 compratio = totalrawsize / totalsize
3417
3417
3418 basedfmtstr = b'%%%dd\n'
3418 basedfmtstr = b'%%%dd\n'
3419 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3419 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3420
3420
3421 def dfmtstr(max):
3421 def dfmtstr(max):
3422 return basedfmtstr % len(str(max))
3422 return basedfmtstr % len(str(max))
3423
3423
3424 def pcfmtstr(max, padding=0):
3424 def pcfmtstr(max, padding=0):
3425 return basepcfmtstr % (len(str(max)), b' ' * padding)
3425 return basepcfmtstr % (len(str(max)), b' ' * padding)
3426
3426
3427 def pcfmt(value, total):
3427 def pcfmt(value, total):
3428 if total:
3428 if total:
3429 return (value, 100 * float(value) / total)
3429 return (value, 100 * float(value) / total)
3430 else:
3430 else:
3431 return value, 100.0
3431 return value, 100.0
3432
3432
3433 ui.writenoi18n(b'format : %d\n' % format)
3433 ui.writenoi18n(b'format : %d\n' % format)
3434 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3434 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3435
3435
3436 ui.write(b'\n')
3436 ui.write(b'\n')
3437 fmt = pcfmtstr(totalsize)
3437 fmt = pcfmtstr(totalsize)
3438 fmt2 = dfmtstr(totalsize)
3438 fmt2 = dfmtstr(totalsize)
3439 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3439 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3440 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3440 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3441 ui.writenoi18n(
3441 ui.writenoi18n(
3442 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3442 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3443 )
3443 )
3444 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3444 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3445 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3445 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3446 ui.writenoi18n(
3446 ui.writenoi18n(
3447 b' text : '
3447 b' text : '
3448 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3448 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3449 )
3449 )
3450 ui.writenoi18n(
3450 ui.writenoi18n(
3451 b' delta : '
3451 b' delta : '
3452 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3452 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3453 )
3453 )
3454 ui.writenoi18n(
3454 ui.writenoi18n(
3455 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3455 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3456 )
3456 )
3457 for depth in sorted(numsnapdepth):
3457 for depth in sorted(numsnapdepth):
3458 ui.write(
3458 ui.write(
3459 (b' lvl-%-3d : ' % depth)
3459 (b' lvl-%-3d : ' % depth)
3460 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3460 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3461 )
3461 )
3462 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3462 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3463 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3463 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3464 ui.writenoi18n(
3464 ui.writenoi18n(
3465 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3465 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3466 )
3466 )
3467 for depth in sorted(numsnapdepth):
3467 for depth in sorted(numsnapdepth):
3468 ui.write(
3468 ui.write(
3469 (b' lvl-%-3d : ' % depth)
3469 (b' lvl-%-3d : ' % depth)
3470 + fmt % pcfmt(snaptotal[depth], totalsize)
3470 + fmt % pcfmt(snaptotal[depth], totalsize)
3471 )
3471 )
3472 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3472 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3473
3473
3474 def fmtchunktype(chunktype):
3474 def fmtchunktype(chunktype):
3475 if chunktype == b'empty':
3475 if chunktype == b'empty':
3476 return b' %s : ' % chunktype
3476 return b' %s : ' % chunktype
3477 elif chunktype in pycompat.bytestr(string.ascii_letters):
3477 elif chunktype in pycompat.bytestr(string.ascii_letters):
3478 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3478 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3479 else:
3479 else:
3480 return b' 0x%s : ' % hex(chunktype)
3480 return b' 0x%s : ' % hex(chunktype)
3481
3481
3482 ui.write(b'\n')
3482 ui.write(b'\n')
3483 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3483 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3484 for chunktype in sorted(chunktypecounts):
3484 for chunktype in sorted(chunktypecounts):
3485 ui.write(fmtchunktype(chunktype))
3485 ui.write(fmtchunktype(chunktype))
3486 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3486 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3487 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3487 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3488 for chunktype in sorted(chunktypecounts):
3488 for chunktype in sorted(chunktypecounts):
3489 ui.write(fmtchunktype(chunktype))
3489 ui.write(fmtchunktype(chunktype))
3490 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3490 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3491
3491
3492 ui.write(b'\n')
3492 ui.write(b'\n')
3493 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3493 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3494 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3494 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3495 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3495 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3496 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3496 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3497 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3497 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3498
3498
3499 if format > 0:
3499 if format > 0:
3500 ui.write(b'\n')
3500 ui.write(b'\n')
3501 ui.writenoi18n(
3501 ui.writenoi18n(
3502 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3502 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3503 % tuple(datasize)
3503 % tuple(datasize)
3504 )
3504 )
3505 ui.writenoi18n(
3505 ui.writenoi18n(
3506 b'full revision size (min/max/avg) : %d / %d / %d\n'
3506 b'full revision size (min/max/avg) : %d / %d / %d\n'
3507 % tuple(fullsize)
3507 % tuple(fullsize)
3508 )
3508 )
3509 ui.writenoi18n(
3509 ui.writenoi18n(
3510 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3510 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3511 % tuple(semisize)
3511 % tuple(semisize)
3512 )
3512 )
3513 for depth in sorted(snapsizedepth):
3513 for depth in sorted(snapsizedepth):
3514 if depth == 0:
3514 if depth == 0:
3515 continue
3515 continue
3516 ui.writenoi18n(
3516 ui.writenoi18n(
3517 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3517 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3518 % ((depth,) + tuple(snapsizedepth[depth]))
3518 % ((depth,) + tuple(snapsizedepth[depth]))
3519 )
3519 )
3520 ui.writenoi18n(
3520 ui.writenoi18n(
3521 b'delta size (min/max/avg) : %d / %d / %d\n'
3521 b'delta size (min/max/avg) : %d / %d / %d\n'
3522 % tuple(deltasize)
3522 % tuple(deltasize)
3523 )
3523 )
3524
3524
3525 if numdeltas > 0:
3525 if numdeltas > 0:
3526 ui.write(b'\n')
3526 ui.write(b'\n')
3527 fmt = pcfmtstr(numdeltas)
3527 fmt = pcfmtstr(numdeltas)
3528 fmt2 = pcfmtstr(numdeltas, 4)
3528 fmt2 = pcfmtstr(numdeltas, 4)
3529 ui.writenoi18n(
3529 ui.writenoi18n(
3530 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3530 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3531 )
3531 )
3532 if numprev > 0:
3532 if numprev > 0:
3533 ui.writenoi18n(
3533 ui.writenoi18n(
3534 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3534 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3535 )
3535 )
3536 ui.writenoi18n(
3536 ui.writenoi18n(
3537 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3537 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3538 )
3538 )
3539 ui.writenoi18n(
3539 ui.writenoi18n(
3540 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3540 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3541 )
3541 )
3542 if gdelta:
3542 if gdelta:
3543 ui.writenoi18n(
3543 ui.writenoi18n(
3544 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3544 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3545 )
3545 )
3546 ui.writenoi18n(
3546 ui.writenoi18n(
3547 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3547 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3548 )
3548 )
3549 ui.writenoi18n(
3549 ui.writenoi18n(
3550 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3550 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3551 )
3551 )
3552
3552
3553
3553
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # Debug mode prints full-length hashes, normal mode the short form.
    shortfn = hex if ui.debugflag else short

    # Column width for node ids.  Measure one real node when the revlog
    # has any revisions; otherwise keep a sane default width.
    idlen = 12
    for sample in r:
        idlen = len(shortfn(r.node(sample)))
        break

    # Emit the header matching the requested format and verbosity.
    if format == 0:
        if ui.verbose:
            header = b" rev offset length linkrev %s %s p2\n" % (
                b"nodeid".ljust(idlen),
                b"p1".ljust(idlen),
            )
        else:
            header = b" rev linkrev %s %s p2\n" % (
                b"nodeid".ljust(idlen),
                b"p1".ljust(idlen),
            )
        ui.writenoi18n(header)
    elif format == 1:
        if ui.verbose:
            header = (
                b" rev flag offset length size link p1" b" p2 %s\n"
            ) % b"nodeid".rjust(idlen)
        else:
            header = b" rev flag size link p1 p2 %s\n" % b"nodeid".rjust(
                idlen
            )
        ui.writenoi18n(header)

    # One output line per revision, shaped by format and verbosity.
    for rev in r:
        node = r.node(rev)
        if format == 0:
            # A corrupt revlog may fail the parent lookup; fall back to
            # null parents so the rest of the index can still be dumped.
            try:
                parents = r.parents(node)
            except Exception:
                parents = [repo.nullid, repo.nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        rev,
                        r.start(rev),
                        r.length(rev),
                        r.linkrev(rev),
                        shortfn(node),
                        shortfn(parents[0]),
                        shortfn(parents[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        rev,
                        r.linkrev(rev),
                        shortfn(node),
                        shortfn(parents[0]),
                        shortfn(parents[1]),
                    )
                )
        elif format == 1:
            parentrevs = r.parentrevs(rev)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        rev,
                        r.flags(rev),
                        r.start(rev),
                        r.length(rev),
                        r.rawsize(rev),
                        r.linkrev(rev),
                        parentrevs[0],
                        parentrevs[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        rev,
                        r.flags(rev),
                        r.rawsize(rev),
                        r.linkrev(rev),
                        parentrevs[0],
                        parentrevs[1],
                        shortfn(node),
                    )
                )
3667
3667
3668
3668
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # Transformation pipeline: each stage rewrites the tree produced by
    # the previous one, in this fixed order.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        # Drop the final (optimize) stage when asked not to optimize.
        del stages[-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {stagename for stagename, _func in stages}

    # Stages whose tree is always printed, and stages printed only when
    # their tree differs from the last one shown.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
        if opts[b'optimize']:
            showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for stagename in opts[b'show_stage']:
            if stagename not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % stagename)
        showalways.update(opts[b'show_stage'])

    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for stagename, transform in stages:
        tree = transform(tree)
        treebystage[stagename] = tree
        wanted = stagename in showalways or (
            stagename in showchanged and tree != printedtree
        )
        if wanted:
            if opts[b'show_stage'] or stagename != b'parsed':
                ui.write(b"* %s:\n" % stagename)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # Evaluate both the analyzed and the optimized tree and diff the
        # resulting revision lists; any difference is an optimizer bug.
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        matcher = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in matcher.get_opcodes():
            if tag in ('delete', 'replace'):
                for rev in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % rev, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for rev in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % rev, label=b'diff.inserted')
            if tag == 'equal':
                for rev in arevs[alo:ahi]:
                    ui.write(b' %d\n' % rev)
        return 1

    matchfn = revset.makematcher(tree)
    result = matchfn(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(result), b"\n")
    if not opts[b'show_revs']:
        return
    for rev in result:
        ui.write(b"%d\n" % rev)
3800
3800
3801
3801
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    # Only the SSH-over-stdio transport is implemented here.
    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    # The two logging destinations are mutually exclusive.
    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    logfh = None
    if opts[b'logiofd']:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        fd = int(opts[b'logiofd'])
        try:
            logfh = os.fdopen(fd, 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(fd, 'wb', 0)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 0)

    server = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    server.serve_forever()
3850
3850
3851
3851
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, without touching
    anything else. This is useful for writing repository conversion tools, but
    should be used with extreme care. For example, neither the working
    directory nor the dirstate is updated, so file status may be incorrect
    after running this command. Only use it if you are one of the few people
    that deeply understand both conversion tools and file level histories. If
    you are reading this help, you are not one of those people (most of them
    sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    node1 = scmutil.revsingle(repo, rev1).node()
    # rev2 defaults to the null revision when omitted.
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    with repo.wlock():
        repo.setparents(node1, node2)
3879
3879
3880
3880
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        # With -c/-m/--dir the sole positional argument is the revision.
        if rev is not None:
            # fixed: error paths previously reported b'debugdata' (a
            # copy-paste leftover from the debugdata command)
            raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugsidedata', file_, opts)
    # Unwrap to the underlying revlog when the storage object wraps one.
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        sidedata = list(sidedata.items())
        sidedata.sort()
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b'  %s\n' % stringutil.pprint(value))
3907
3907
3908
3908
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = urlutil.get_unique_pull_path(
        b'debugssl', repo, ui, source
    )
    url = urlutil.url(source)

    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # ssl.wrap_socket() was deprecated in Python 3.7 and removed in 3.12.
    # Build an explicitly unverified client context instead; verification is
    # intentionally disabled because we only want the peer's raw certificate
    # so Windows can (re)build its chain.
    sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    sslcontext.check_hostname = False
    sslcontext.verify_mode = ssl.CERT_NONE
    s = sslcontext.wrap_socket(socket.socket())

    try:
        s.connect(addr)
        # binary_form=True: the DER blob is what the win32 helper needs
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3980
3980
3981
3981
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect strip-backup bundles, most recently modified first.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Show the bundle's changesets, honoring log-style limiting options.
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        source, branches = urlutil.get_unique_pull_path(
            b'debugbackupbundle',
            repo,
            ui,
            source,
            default_branches=opts.get(b'branch'),
        )
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # Quietly probe the bundle for changesets missing from the repo.
        with ui.silent():
            try:
                other, chlist, cleanupfn = bundlerepo.getremotechanges(
                    ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
                )
            except error.LookupError:
                continue

        try:
            if not chlist:
                continue
            if recovernode:
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        # first matching bundle wins; stop scanning
                        break
            else:
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                    displayer = logcmdutil.changesetdisplayer(
                        ui, other, opts, False
                    )
                    display(other, chlist, displayer)
                    displayer.close()
        finally:
            cleanupfn()
4122
4122
4123
4123
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    """dump the subrepository state (path, source, revision) of a revision"""
    ctx = scmutil.revsingle(repo, rev, None)
    # substate maps subrepo path -> (source, revision, kind); sort for
    # deterministic output
    for k, v in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % k)
        ui.writenoi18n(b' source   %s\n' % v[0])
        ui.writenoi18n(b' revision %s\n' % v[1])
4135
4135
4136
4136
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    # Imported lazily: only needed when this debug command actually runs.
    import code

    imported_objects = {
        'ui': ui,
        'repo': repo,  # may be None when run outside a repository
    }

    code.interact(local=imported_objects)
4152
4152
4153
4153
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless the closest
    successors sets are requested (--closest).

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    ctx2str = bytes
    node2str = short
    for rev in logcmdutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % ctx2str(ctx))
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        ):
            if succsset:
                # indent successors under their predecessor revision
                ui.write(b'    ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(b' ')
                    ui.write(node2str(node))
            ui.write(b'\n')
4208
4208
4209
4209
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    flog = repo.file(b'.hgtags')
    for r in repo:
        node = repo[r].node()
        # computemissing=False: only report what is already cached
        tagsnode = cache.getfnode(node, computemissing=False)
        if tagsnode:
            tagsnodedisplay = hex(tagsnode)
            if not flog.hasnode(tagsnode):
                tagsnodedisplay += b' (unknown node)'
        elif tagsnode is None:
            # NOTE(review): getfnode appears to use None for "not cached"
            # and another falsy marker for "invalid entry" — confirm against
            # tagsmod.hgtagsfnodescache
            tagsnodedisplay = b'missing'
        else:
            tagsnodedisplay = b'invalid'

        ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay))
4228
4228
4229
4229
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = logcmdutil.revrange(repo, opts['rev'])

    # Collect -D KEY=VALUE keyword definitions; 'ui' is reserved.
    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        # Dump the raw parse tree, and the alias-expanded one if it differs.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic template: render once with the supplied properties.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested changeset.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
4293
4293
4294
4294
4295 @command(
4295 @command(
4296 b'debuguigetpass',
4296 b'debuguigetpass',
4297 [
4297 [
4298 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
4298 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
4299 ],
4299 ],
4300 _(b'[-p TEXT]'),
4300 _(b'[-p TEXT]'),
4301 norepo=True,
4301 norepo=True,
4302 )
4302 )
4303 def debuguigetpass(ui, prompt=b''):
4303 def debuguigetpass(ui, prompt=b''):
4304 """show prompt to type password"""
4304 """show prompt to type password"""
4305 r = ui.getpass(prompt)
4305 r = ui.getpass(prompt)
4306 if r is None:
4306 if r is None:
4307 r = b"<default response>"
4307 r = b"<default response>"
4308 ui.writenoi18n(b'response: %s\n' % r)
4308 ui.writenoi18n(b'response: %s\n' % r)
4309
4309
4310
4310
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # echo the user's answer so tests can assert on it
    r = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % r)
4323
4323
4324
4324
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # NOTE(review): both wlock and lock are taken — presumably some cache
    # writers need the working-copy lock too; confirm before relaxing.
    with repo.wlock(), repo.lock():
        repo.updatecaches(caches=repository.CACHES_ALL)
4330
4330
4331
4331
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # --optimize may repeat; deduplicate the requested optimizations before
    # handing control to the upgrade machinery.
    optimizations = set(optimize)
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=optimizations, backup=backup, **opts
    )
4381
4381
4382
4382
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    items = list(repo[None].walk(m))
    if not items:
        return

    # Only rewrite path separators when ui.slash is requested on a platform
    # whose native separator is not already '/'.
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        fmtpath = util.normpath
    else:

        def fmtpath(fn):
            return fn

    # Column widths sized to the longest repo-relative and cwd-relative
    # paths; generator expressions avoid building throwaway lists.
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(fname) for fname in items),
        max(len(repo.pathto(fname)) for fname in items),
    )
    for fname in items:
        line = fmt % (
            fname,
            fmtpath(repo.pathto(fname)),
            b'exact' if m.exact(fname) else b'',
        )
        ui.write(b"%s\n" % line.rstrip())
4409
4409
4410
4410
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        divergent = entry.get(b'divergentnodes')
        if divergent:
            # Render each divergent changeset as "<hex> (<phase>)" and leave a
            # trailing space so it joins cleanly with the reason text.
            rendered = [
                b'%s (%s)' % (dctx.hex(), dctx.phasestr()) for dctx in divergent
            ]
            dnodes = b' '.join(rendered) + b' '
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4428
4428
4429
4429
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    try:
        # Strip the generic remote options; only command-specific arguments
        # with truthy values are forwarded over the wire.
        for remoteopt in cmdutil.remoteopts:
            del opts[remoteopt[1]]
        args = pycompat.strkwargs({k: v for k, v in opts.items() if v})
        # Issue the command twice: matching results confirm the first call
        # did not corrupt the stream for the next command.
        res1 = peer.debugwireargs(*vals, **args)
        res2 = peer.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % res1)
        if res1 != res2:
            ui.warn(b"%s\n" % res2)
    finally:
        peer.close()
4460
4460
4461
4461
4462 def _parsewirelangblocks(fh):
4462 def _parsewirelangblocks(fh):
4463 activeaction = None
4463 activeaction = None
4464 blocklines = []
4464 blocklines = []
4465 lastindent = 0
4465 lastindent = 0
4466
4466
4467 for line in fh:
4467 for line in fh:
4468 line = line.rstrip()
4468 line = line.rstrip()
4469 if not line:
4469 if not line:
4470 continue
4470 continue
4471
4471
4472 if line.startswith(b'#'):
4472 if line.startswith(b'#'):
4473 continue
4473 continue
4474
4474
4475 if not line.startswith(b' '):
4475 if not line.startswith(b' '):
4476 # New block. Flush previous one.
4476 # New block. Flush previous one.
4477 if activeaction:
4477 if activeaction:
4478 yield activeaction, blocklines
4478 yield activeaction, blocklines
4479
4479
4480 activeaction = line
4480 activeaction = line
4481 blocklines = []
4481 blocklines = []
4482 lastindent = 0
4482 lastindent = 0
4483 continue
4483 continue
4484
4484
4485 # Else we start with an indent.
4485 # Else we start with an indent.
4486
4486
4487 if not activeaction:
4487 if not activeaction:
4488 raise error.Abort(_(b'indented line outside of block'))
4488 raise error.Abort(_(b'indented line outside of block'))
4489
4489
4490 indent = len(line) - len(line.lstrip())
4490 indent = len(line) - len(line.lstrip())
4491
4491
4492 # If this line is indented more than the last line, concatenate it.
4492 # If this line is indented more than the last line, concatenate it.
4493 if indent > lastindent and blocklines:
4493 if indent > lastindent and blocklines:
4494 blocklines[-1] += line.lstrip()
4494 blocklines[-1] += line.lstrip()
4495 else:
4495 else:
4496 blocklines.append(line)
4496 blocklines.append(line)
4497 lastindent = indent
4497 lastindent = indent
4498
4498
4499 # Flush last block.
4499 # Flush last block.
4500 if activeaction:
4500 if activeaction:
4501 yield activeaction, blocklines
4501 yield activeaction, blocklines
4502
4502
4503
4503
4504 @command(
4504 @command(
4505 b'debugwireproto',
4505 b'debugwireproto',
4506 [
4506 [
4507 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4507 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4508 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4508 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4509 (
4509 (
4510 b'',
4510 b'',
4511 b'noreadstderr',
4511 b'noreadstderr',
4512 False,
4512 False,
4513 _(b'do not read from stderr of the remote'),
4513 _(b'do not read from stderr of the remote'),
4514 ),
4514 ),
4515 (
4515 (
4516 b'',
4516 b'',
4517 b'nologhandshake',
4517 b'nologhandshake',
4518 False,
4518 False,
4519 _(b'do not log I/O related to the peer handshake'),
4519 _(b'do not log I/O related to the peer handshake'),
4520 ),
4520 ),
4521 ]
4521 ]
4522 + cmdutil.remoteopts,
4522 + cmdutil.remoteopts,
4523 _(b'[PATH]'),
4523 _(b'[PATH]'),
4524 optionalrepo=True,
4524 optionalrepo=True,
4525 )
4525 )
4526 def debugwireproto(ui, repo, path=None, **opts):
4526 def debugwireproto(ui, repo, path=None, **opts):
4527 """send wire protocol commands to a server
4527 """send wire protocol commands to a server
4528
4528
4529 This command can be used to issue wire protocol commands to remote
4529 This command can be used to issue wire protocol commands to remote
4530 peers and to debug the raw data being exchanged.
4530 peers and to debug the raw data being exchanged.
4531
4531
4532 ``--localssh`` will start an SSH server against the current repository
4532 ``--localssh`` will start an SSH server against the current repository
4533 and connect to that. By default, the connection will perform a handshake
4533 and connect to that. By default, the connection will perform a handshake
4534 and establish an appropriate peer instance.
4534 and establish an appropriate peer instance.
4535
4535
4536 ``--peer`` can be used to bypass the handshake protocol and construct a
4536 ``--peer`` can be used to bypass the handshake protocol and construct a
4537 peer instance using the specified class type. Valid values are ``raw``,
4537 peer instance using the specified class type. Valid values are ``raw``,
4538 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4538 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4539 don't support higher-level command actions.
4539 don't support higher-level command actions.
4540
4540
4541 ``--noreadstderr`` can be used to disable automatic reading from stderr
4541 ``--noreadstderr`` can be used to disable automatic reading from stderr
4542 of the peer (for SSH connections only). Disabling automatic reading of
4542 of the peer (for SSH connections only). Disabling automatic reading of
4543 stderr is useful for making output more deterministic.
4543 stderr is useful for making output more deterministic.
4544
4544
4545 Commands are issued via a mini language which is specified via stdin.
4545 Commands are issued via a mini language which is specified via stdin.
4546 The language consists of individual actions to perform. An action is
4546 The language consists of individual actions to perform. An action is
4547 defined by a block. A block is defined as a line with no leading
4547 defined by a block. A block is defined as a line with no leading
4548 space followed by 0 or more lines with leading space. Blocks are
4548 space followed by 0 or more lines with leading space. Blocks are
4549 effectively a high-level command with additional metadata.
4549 effectively a high-level command with additional metadata.
4550
4550
4551 Lines beginning with ``#`` are ignored.
4551 Lines beginning with ``#`` are ignored.
4552
4552
4553 The following sections denote available actions.
4553 The following sections denote available actions.
4554
4554
4555 raw
4555 raw
4556 ---
4556 ---
4557
4557
4558 Send raw data to the server.
4558 Send raw data to the server.
4559
4559
4560 The block payload contains the raw data to send as one atomic send
4560 The block payload contains the raw data to send as one atomic send
4561 operation. The data may not actually be delivered in a single system
4561 operation. The data may not actually be delivered in a single system
4562 call: it depends on the abilities of the transport being used.
4562 call: it depends on the abilities of the transport being used.
4563
4563
4564 Each line in the block is de-indented and concatenated. Then, that
4564 Each line in the block is de-indented and concatenated. Then, that
4565 value is evaluated as a Python b'' literal. This allows the use of
4565 value is evaluated as a Python b'' literal. This allows the use of
4566 backslash escaping, etc.
4566 backslash escaping, etc.
4567
4567
4568 raw+
4568 raw+
4569 ----
4569 ----
4570
4570
4571 Behaves like ``raw`` except flushes output afterwards.
4571 Behaves like ``raw`` except flushes output afterwards.
4572
4572
4573 command <X>
4573 command <X>
4574 -----------
4574 -----------
4575
4575
4576 Send a request to run a named command, whose name follows the ``command``
4576 Send a request to run a named command, whose name follows the ``command``
4577 string.
4577 string.
4578
4578
4579 Arguments to the command are defined as lines in this block. The format of
4579 Arguments to the command are defined as lines in this block. The format of
4580 each line is ``<key> <value>``. e.g.::
4580 each line is ``<key> <value>``. e.g.::
4581
4581
4582 command listkeys
4582 command listkeys
4583 namespace bookmarks
4583 namespace bookmarks
4584
4584
4585 If the value begins with ``eval:``, it will be interpreted as a Python
4585 If the value begins with ``eval:``, it will be interpreted as a Python
4586 literal expression. Otherwise values are interpreted as Python b'' literals.
4586 literal expression. Otherwise values are interpreted as Python b'' literals.
4587 This allows sending complex types and encoding special byte sequences via
4587 This allows sending complex types and encoding special byte sequences via
4588 backslash escaping.
4588 backslash escaping.
4589
4589
4590 The following arguments have special meaning:
4590 The following arguments have special meaning:
4591
4591
4592 ``PUSHFILE``
4592 ``PUSHFILE``
4593 When defined, the *push* mechanism of the peer will be used instead
4593 When defined, the *push* mechanism of the peer will be used instead
4594 of the static request-response mechanism and the content of the
4594 of the static request-response mechanism and the content of the
4595 file specified in the value of this argument will be sent as the
4595 file specified in the value of this argument will be sent as the
4596 command payload.
4596 command payload.
4597
4597
4598 This can be used to submit a local bundle file to the remote.
4598 This can be used to submit a local bundle file to the remote.
4599
4599
4600 batchbegin
4600 batchbegin
4601 ----------
4601 ----------
4602
4602
4603 Instruct the peer to begin a batched send.
4603 Instruct the peer to begin a batched send.
4604
4604
4605 All ``command`` blocks are queued for execution until the next
4605 All ``command`` blocks are queued for execution until the next
4606 ``batchsubmit`` block.
4606 ``batchsubmit`` block.
4607
4607
4608 batchsubmit
4608 batchsubmit
4609 -----------
4609 -----------
4610
4610
4611 Submit previously queued ``command`` blocks as a batch request.
4611 Submit previously queued ``command`` blocks as a batch request.
4612
4612
4613 This action MUST be paired with a ``batchbegin`` action.
4613 This action MUST be paired with a ``batchbegin`` action.
4614
4614
4615 httprequest <method> <path>
4615 httprequest <method> <path>
4616 ---------------------------
4616 ---------------------------
4617
4617
4618 (HTTP peer only)
4618 (HTTP peer only)
4619
4619
4620 Send an HTTP request to the peer.
4620 Send an HTTP request to the peer.
4621
4621
4622 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4622 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4623
4623
4624 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4624 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4625 headers to add to the request. e.g. ``Accept: foo``.
4625 headers to add to the request. e.g. ``Accept: foo``.
4626
4626
4627 The following arguments are special:
4627 The following arguments are special:
4628
4628
4629 ``BODYFILE``
4629 ``BODYFILE``
4630 The content of the file defined as the value to this argument will be
4630 The content of the file defined as the value to this argument will be
4631 transferred verbatim as the HTTP request body.
4631 transferred verbatim as the HTTP request body.
4632
4632
4633 ``frame <type> <flags> <payload>``
4633 ``frame <type> <flags> <payload>``
4634 Send a unified protocol frame as part of the request body.
4634 Send a unified protocol frame as part of the request body.
4635
4635
4636 All frames will be collected and sent as the body to the HTTP
4636 All frames will be collected and sent as the body to the HTTP
4637 request.
4637 request.
4638
4638
4639 close
4639 close
4640 -----
4640 -----
4641
4641
4642 Close the connection to the server.
4642 Close the connection to the server.
4643
4643
4644 flush
4644 flush
4645 -----
4645 -----
4646
4646
4647 Flush data written to the server.
4647 Flush data written to the server.
4648
4648
4649 readavailable
4649 readavailable
4650 -------------
4650 -------------
4651
4651
4652 Close the write end of the connection and read all available data from
4652 Close the write end of the connection and read all available data from
4653 the server.
4653 the server.
4654
4654
4655 If the connection to the server encompasses multiple pipes, we poll both
4655 If the connection to the server encompasses multiple pipes, we poll both
4656 pipes and read available data.
4656 pipes and read available data.
4657
4657
4658 readline
4658 readline
4659 --------
4659 --------
4660
4660
4661 Read a line of output from the server. If there are multiple output
4661 Read a line of output from the server. If there are multiple output
4662 pipes, reads only the main pipe.
4662 pipes, reads only the main pipe.
4663
4663
4664 ereadline
4664 ereadline
4665 ---------
4665 ---------
4666
4666
4667 Like ``readline``, but read from the stderr pipe, if available.
4667 Like ``readline``, but read from the stderr pipe, if available.
4668
4668
4669 read <X>
4669 read <X>
4670 --------
4670 --------
4671
4671
4672 ``read()`` N bytes from the server's main output pipe.
4672 ``read()`` N bytes from the server's main output pipe.
4673
4673
4674 eread <X>
4674 eread <X>
4675 ---------
4675 ---------
4676
4676
4677 ``read()`` N bytes from the server's stderr pipe, if available.
4677 ``read()`` N bytes from the server's stderr pipe, if available.
4678
4678
4679 Specifying Unified Frame-Based Protocol Frames
4679 Specifying Unified Frame-Based Protocol Frames
4680 ----------------------------------------------
4680 ----------------------------------------------
4681
4681
4682 It is possible to emit a *Unified Frame-Based Protocol* by using special
4682 It is possible to emit a *Unified Frame-Based Protocol* by using special
4683 syntax.
4683 syntax.
4684
4684
4685 A frame is composed as a type, flags, and payload. These can be parsed
4685 A frame is composed as a type, flags, and payload. These can be parsed
4686 from a string of the form:
4686 from a string of the form:
4687
4687
4688 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4688 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4689
4689
4690 ``request-id`` and ``stream-id`` are integers defining the request and
4690 ``request-id`` and ``stream-id`` are integers defining the request and
4691 stream identifiers.
4691 stream identifiers.
4692
4692
4693 ``type`` can be an integer value for the frame type or the string name
4693 ``type`` can be an integer value for the frame type or the string name
4694 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4694 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4695 ``command-name``.
4695 ``command-name``.
4696
4696
4697 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4697 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4698 components. Each component (and there can be just one) can be an integer
4698 components. Each component (and there can be just one) can be an integer
4699 or a flag name for stream flags or frame flags, respectively. Values are
4699 or a flag name for stream flags or frame flags, respectively. Values are
4700 resolved to integers and then bitwise OR'd together.
4700 resolved to integers and then bitwise OR'd together.
4701
4701
4702 ``payload`` represents the raw frame payload. If it begins with
4702 ``payload`` represents the raw frame payload. If it begins with
4703 ``cbor:``, the following string is evaluated as Python code and the
4703 ``cbor:``, the following string is evaluated as Python code and the
4704 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4704 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4705 as a Python byte string literal.
4705 as a Python byte string literal.
4706 """
4706 """
4707 opts = pycompat.byteskwargs(opts)
4707 opts = pycompat.byteskwargs(opts)
4708
4708
4709 if opts[b'localssh'] and not repo:
4709 if opts[b'localssh'] and not repo:
4710 raise error.Abort(_(b'--localssh requires a repository'))
4710 raise error.Abort(_(b'--localssh requires a repository'))
4711
4711
4712 if opts[b'peer'] and opts[b'peer'] not in (
4712 if opts[b'peer'] and opts[b'peer'] not in (
4713 b'raw',
4713 b'raw',
4714 b'ssh1',
4714 b'ssh1',
4715 ):
4715 ):
4716 raise error.Abort(
4716 raise error.Abort(
4717 _(b'invalid value for --peer'),
4717 _(b'invalid value for --peer'),
4718 hint=_(b'valid values are "raw" and "ssh1"'),
4718 hint=_(b'valid values are "raw" and "ssh1"'),
4719 )
4719 )
4720
4720
4721 if path and opts[b'localssh']:
4721 if path and opts[b'localssh']:
4722 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4722 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4723
4723
4724 if ui.interactive():
4724 if ui.interactive():
4725 ui.write(_(b'(waiting for commands on stdin)\n'))
4725 ui.write(_(b'(waiting for commands on stdin)\n'))
4726
4726
4727 blocks = list(_parsewirelangblocks(ui.fin))
4727 blocks = list(_parsewirelangblocks(ui.fin))
4728
4728
4729 proc = None
4729 proc = None
4730 stdin = None
4730 stdin = None
4731 stdout = None
4731 stdout = None
4732 stderr = None
4732 stderr = None
4733 opener = None
4733 opener = None
4734
4734
4735 if opts[b'localssh']:
4735 if opts[b'localssh']:
4736 # We start the SSH server in its own process so there is process
4736 # We start the SSH server in its own process so there is process
4737 # separation. This prevents a whole class of potential bugs around
4737 # separation. This prevents a whole class of potential bugs around
4738 # shared state from interfering with server operation.
4738 # shared state from interfering with server operation.
4739 args = procutil.hgcmd() + [
4739 args = procutil.hgcmd() + [
4740 b'-R',
4740 b'-R',
4741 repo.root,
4741 repo.root,
4742 b'debugserve',
4742 b'debugserve',
4743 b'--sshstdio',
4743 b'--sshstdio',
4744 ]
4744 ]
4745 proc = subprocess.Popen(
4745 proc = subprocess.Popen(
4746 pycompat.rapply(procutil.tonativestr, args),
4746 pycompat.rapply(procutil.tonativestr, args),
4747 stdin=subprocess.PIPE,
4747 stdin=subprocess.PIPE,
4748 stdout=subprocess.PIPE,
4748 stdout=subprocess.PIPE,
4749 stderr=subprocess.PIPE,
4749 stderr=subprocess.PIPE,
4750 bufsize=0,
4750 bufsize=0,
4751 )
4751 )
4752
4752
4753 stdin = proc.stdin
4753 stdin = proc.stdin
4754 stdout = proc.stdout
4754 stdout = proc.stdout
4755 stderr = proc.stderr
4755 stderr = proc.stderr
4756
4756
4757 # We turn the pipes into observers so we can log I/O.
4757 # We turn the pipes into observers so we can log I/O.
4758 if ui.verbose or opts[b'peer'] == b'raw':
4758 if ui.verbose or opts[b'peer'] == b'raw':
4759 stdin = util.makeloggingfileobject(
4759 stdin = util.makeloggingfileobject(
4760 ui, proc.stdin, b'i', logdata=True
4760 ui, proc.stdin, b'i', logdata=True
4761 )
4761 )
4762 stdout = util.makeloggingfileobject(
4762 stdout = util.makeloggingfileobject(
4763 ui, proc.stdout, b'o', logdata=True
4763 ui, proc.stdout, b'o', logdata=True
4764 )
4764 )
4765 stderr = util.makeloggingfileobject(
4765 stderr = util.makeloggingfileobject(
4766 ui, proc.stderr, b'e', logdata=True
4766 ui, proc.stderr, b'e', logdata=True
4767 )
4767 )
4768
4768
4769 # --localssh also implies the peer connection settings.
4769 # --localssh also implies the peer connection settings.
4770
4770
4771 url = b'ssh://localserver'
4771 url = b'ssh://localserver'
4772 autoreadstderr = not opts[b'noreadstderr']
4772 autoreadstderr = not opts[b'noreadstderr']
4773
4773
4774 if opts[b'peer'] == b'ssh1':
4774 if opts[b'peer'] == b'ssh1':
4775 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4775 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4776 peer = sshpeer.sshv1peer(
4776 peer = sshpeer.sshv1peer(
4777 ui,
4777 ui,
4778 url,
4778 url,
4779 proc,
4779 proc,
4780 stdin,
4780 stdin,
4781 stdout,
4781 stdout,
4782 stderr,
4782 stderr,
4783 None,
4783 None,
4784 autoreadstderr=autoreadstderr,
4784 autoreadstderr=autoreadstderr,
4785 )
4785 )
4786 elif opts[b'peer'] == b'raw':
4786 elif opts[b'peer'] == b'raw':
4787 ui.write(_(b'using raw connection to peer\n'))
4787 ui.write(_(b'using raw connection to peer\n'))
4788 peer = None
4788 peer = None
4789 else:
4789 else:
4790 ui.write(_(b'creating ssh peer from handshake results\n'))
4790 ui.write(_(b'creating ssh peer from handshake results\n'))
4791 peer = sshpeer.makepeer(
4791 peer = sshpeer.makepeer(
4792 ui,
4792 ui,
4793 url,
4793 url,
4794 proc,
4794 proc,
4795 stdin,
4795 stdin,
4796 stdout,
4796 stdout,
4797 stderr,
4797 stderr,
4798 autoreadstderr=autoreadstderr,
4798 autoreadstderr=autoreadstderr,
4799 )
4799 )
4800
4800
4801 elif path:
4801 elif path:
4802 # We bypass hg.peer() so we can proxy the sockets.
4802 # We bypass hg.peer() so we can proxy the sockets.
4803 # TODO consider not doing this because we skip
4803 # TODO consider not doing this because we skip
4804 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4804 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4805 u = urlutil.url(path)
4805 u = urlutil.url(path)
4806 if u.scheme != b'http':
4806 if u.scheme != b'http':
4807 raise error.Abort(_(b'only http:// paths are currently supported'))
4807 raise error.Abort(_(b'only http:// paths are currently supported'))
4808
4808
4809 url, authinfo = u.authinfo()
4809 url, authinfo = u.authinfo()
4810 openerargs = {
4810 openerargs = {
4811 'useragent': b'Mercurial debugwireproto',
4811 'useragent': b'Mercurial debugwireproto',
4812 }
4812 }
4813
4813
4814 # Turn pipes/sockets into observers so we can log I/O.
4814 # Turn pipes/sockets into observers so we can log I/O.
4815 if ui.verbose:
4815 if ui.verbose:
4816 openerargs.update(
4816 openerargs.update(
4817 {
4817 {
4818 'loggingfh': ui,
4818 'loggingfh': ui,
4819 'loggingname': b's',
4819 'loggingname': b's',
4820 'loggingopts': {
4820 'loggingopts': {
4821 'logdata': True,
4821 'logdata': True,
4822 'logdataapis': False,
4822 'logdataapis': False,
4823 },
4823 },
4824 }
4824 }
4825 )
4825 )
4826
4826
4827 if ui.debugflag:
4827 if ui.debugflag:
4828 openerargs['loggingopts']['logdataapis'] = True
4828 openerargs['loggingopts']['logdataapis'] = True
4829
4829
4830 # Don't send default headers when in raw mode. This allows us to
4830 # Don't send default headers when in raw mode. This allows us to
4831 # bypass most of the behavior of our URL handling code so we can
4831 # bypass most of the behavior of our URL handling code so we can
4832 # have near complete control over what's sent on the wire.
4832 # have near complete control over what's sent on the wire.
4833 if opts[b'peer'] == b'raw':
4833 if opts[b'peer'] == b'raw':
4834 openerargs['sendaccept'] = False
4834 openerargs['sendaccept'] = False
4835
4835
4836 opener = urlmod.opener(ui, authinfo, **openerargs)
4836 opener = urlmod.opener(ui, authinfo, **openerargs)
4837
4837
4838 if opts[b'peer'] == b'raw':
4838 if opts[b'peer'] == b'raw':
4839 ui.write(_(b'using raw connection to peer\n'))
4839 ui.write(_(b'using raw connection to peer\n'))
4840 peer = None
4840 peer = None
4841 elif opts[b'peer']:
4841 elif opts[b'peer']:
4842 raise error.Abort(
4842 raise error.Abort(
4843 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4843 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4844 )
4844 )
4845 else:
4845 else:
4846 peer = httppeer.makepeer(ui, path, opener=opener)
4846 peer = httppeer.makepeer(ui, path, opener=opener)
4847
4847
4848 # We /could/ populate stdin/stdout with sock.makefile()...
4848 # We /could/ populate stdin/stdout with sock.makefile()...
4849 else:
4849 else:
4850 raise error.Abort(_(b'unsupported connection configuration'))
4850 raise error.Abort(_(b'unsupported connection configuration'))
4851
4851
4852 batchedcommands = None
4852 batchedcommands = None
4853
4853
4854 # Now perform actions based on the parsed wire language instructions.
4854 # Now perform actions based on the parsed wire language instructions.
4855 for action, lines in blocks:
4855 for action, lines in blocks:
4856 if action in (b'raw', b'raw+'):
4856 if action in (b'raw', b'raw+'):
4857 if not stdin:
4857 if not stdin:
4858 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4858 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4859
4859
4860 # Concatenate the data together.
4860 # Concatenate the data together.
4861 data = b''.join(l.lstrip() for l in lines)
4861 data = b''.join(l.lstrip() for l in lines)
4862 data = stringutil.unescapestr(data)
4862 data = stringutil.unescapestr(data)
4863 stdin.write(data)
4863 stdin.write(data)
4864
4864
4865 if action == b'raw+':
4865 if action == b'raw+':
4866 stdin.flush()
4866 stdin.flush()
4867 elif action == b'flush':
4867 elif action == b'flush':
4868 if not stdin:
4868 if not stdin:
4869 raise error.Abort(_(b'cannot call flush on this peer'))
4869 raise error.Abort(_(b'cannot call flush on this peer'))
4870 stdin.flush()
4870 stdin.flush()
4871 elif action.startswith(b'command'):
4871 elif action.startswith(b'command'):
4872 if not peer:
4872 if not peer:
4873 raise error.Abort(
4873 raise error.Abort(
4874 _(
4874 _(
4875 b'cannot send commands unless peer instance '
4875 b'cannot send commands unless peer instance '
4876 b'is available'
4876 b'is available'
4877 )
4877 )
4878 )
4878 )
4879
4879
4880 command = action.split(b' ', 1)[1]
4880 command = action.split(b' ', 1)[1]
4881
4881
4882 args = {}
4882 args = {}
4883 for line in lines:
4883 for line in lines:
4884 # We need to allow empty values.
4884 # We need to allow empty values.
4885 fields = line.lstrip().split(b' ', 1)
4885 fields = line.lstrip().split(b' ', 1)
4886 if len(fields) == 1:
4886 if len(fields) == 1:
4887 key = fields[0]
4887 key = fields[0]
4888 value = b''
4888 value = b''
4889 else:
4889 else:
4890 key, value = fields
4890 key, value = fields
4891
4891
4892 if value.startswith(b'eval:'):
4892 if value.startswith(b'eval:'):
4893 value = stringutil.evalpythonliteral(value[5:])
4893 value = stringutil.evalpythonliteral(value[5:])
4894 else:
4894 else:
4895 value = stringutil.unescapestr(value)
4895 value = stringutil.unescapestr(value)
4896
4896
4897 args[key] = value
4897 args[key] = value
4898
4898
4899 if batchedcommands is not None:
4899 if batchedcommands is not None:
4900 batchedcommands.append((command, args))
4900 batchedcommands.append((command, args))
4901 continue
4901 continue
4902
4902
4903 ui.status(_(b'sending %s command\n') % command)
4903 ui.status(_(b'sending %s command\n') % command)
4904
4904
4905 if b'PUSHFILE' in args:
4905 if b'PUSHFILE' in args:
4906 with open(args[b'PUSHFILE'], 'rb') as fh:
4906 with open(args[b'PUSHFILE'], 'rb') as fh:
4907 del args[b'PUSHFILE']
4907 del args[b'PUSHFILE']
4908 res, output = peer._callpush(
4908 res, output = peer._callpush(
4909 command, fh, **pycompat.strkwargs(args)
4909 command, fh, **pycompat.strkwargs(args)
4910 )
4910 )
4911 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4911 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4912 ui.status(
4912 ui.status(
4913 _(b'remote output: %s\n') % stringutil.escapestr(output)
4913 _(b'remote output: %s\n') % stringutil.escapestr(output)
4914 )
4914 )
4915 else:
4915 else:
4916 with peer.commandexecutor() as e:
4916 with peer.commandexecutor() as e:
4917 res = e.callcommand(command, args).result()
4917 res = e.callcommand(command, args).result()
4918
4918
4919 ui.status(
4919 ui.status(
4920 _(b'response: %s\n')
4920 _(b'response: %s\n')
4921 % stringutil.pprint(res, bprefix=True, indent=2)
4921 % stringutil.pprint(res, bprefix=True, indent=2)
4922 )
4922 )
4923
4923
4924 elif action == b'batchbegin':
4924 elif action == b'batchbegin':
4925 if batchedcommands is not None:
4925 if batchedcommands is not None:
4926 raise error.Abort(_(b'nested batchbegin not allowed'))
4926 raise error.Abort(_(b'nested batchbegin not allowed'))
4927
4927
4928 batchedcommands = []
4928 batchedcommands = []
4929 elif action == b'batchsubmit':
4929 elif action == b'batchsubmit':
4930 # There is a batching API we could go through. But it would be
4930 # There is a batching API we could go through. But it would be
4931 # difficult to normalize requests into function calls. It is easier
4931 # difficult to normalize requests into function calls. It is easier
4932 # to bypass this layer and normalize to commands + args.
4932 # to bypass this layer and normalize to commands + args.
4933 ui.status(
4933 ui.status(
4934 _(b'sending batch with %d sub-commands\n')
4934 _(b'sending batch with %d sub-commands\n')
4935 % len(batchedcommands)
4935 % len(batchedcommands)
4936 )
4936 )
4937 assert peer is not None
4937 assert peer is not None
4938 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4938 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4939 ui.status(
4939 ui.status(
4940 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4940 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4941 )
4941 )
4942
4942
4943 batchedcommands = None
4943 batchedcommands = None
4944
4944
4945 elif action.startswith(b'httprequest '):
4945 elif action.startswith(b'httprequest '):
4946 if not opener:
4946 if not opener:
4947 raise error.Abort(
4947 raise error.Abort(
4948 _(b'cannot use httprequest without an HTTP peer')
4948 _(b'cannot use httprequest without an HTTP peer')
4949 )
4949 )
4950
4950
4951 request = action.split(b' ', 2)
4951 request = action.split(b' ', 2)
4952 if len(request) != 3:
4952 if len(request) != 3:
4953 raise error.Abort(
4953 raise error.Abort(
4954 _(
4954 _(
4955 b'invalid httprequest: expected format is '
4955 b'invalid httprequest: expected format is '
4956 b'"httprequest <method> <path>'
4956 b'"httprequest <method> <path>'
4957 )
4957 )
4958 )
4958 )
4959
4959
4960 method, httppath = request[1:]
4960 method, httppath = request[1:]
4961 headers = {}
4961 headers = {}
4962 body = None
4962 body = None
4963 frames = []
4963 frames = []
4964 for line in lines:
4964 for line in lines:
4965 line = line.lstrip()
4965 line = line.lstrip()
4966 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4966 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4967 if m:
4967 if m:
4968 # Headers need to use native strings.
4968 # Headers need to use native strings.
4969 key = pycompat.strurl(m.group(1))
4969 key = pycompat.strurl(m.group(1))
4970 value = pycompat.strurl(m.group(2))
4970 value = pycompat.strurl(m.group(2))
4971 headers[key] = value
4971 headers[key] = value
4972 continue
4972 continue
4973
4973
4974 if line.startswith(b'BODYFILE '):
4974 if line.startswith(b'BODYFILE '):
4975 with open(line.split(b' ', 1), b'rb') as fh:
4975 with open(line.split(b' ', 1), b'rb') as fh:
4976 body = fh.read()
4976 body = fh.read()
4977 elif line.startswith(b'frame '):
4977 elif line.startswith(b'frame '):
4978 frame = wireprotoframing.makeframefromhumanstring(
4978 frame = wireprotoframing.makeframefromhumanstring(
4979 line[len(b'frame ') :]
4979 line[len(b'frame ') :]
4980 )
4980 )
4981
4981
4982 frames.append(frame)
4982 frames.append(frame)
4983 else:
4983 else:
4984 raise error.Abort(
4984 raise error.Abort(
4985 _(b'unknown argument to httprequest: %s') % line
4985 _(b'unknown argument to httprequest: %s') % line
4986 )
4986 )
4987
4987
4988 url = path + httppath
4988 url = path + httppath
4989
4989
4990 if frames:
4990 if frames:
4991 body = b''.join(bytes(f) for f in frames)
4991 body = b''.join(bytes(f) for f in frames)
4992
4992
4993 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4993 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4994
4994
4995 # urllib.Request insists on using has_data() as a proxy for
4995 # urllib.Request insists on using has_data() as a proxy for
4996 # determining the request method. Override that to use our
4996 # determining the request method. Override that to use our
4997 # explicitly requested method.
4997 # explicitly requested method.
4998 req.get_method = lambda: pycompat.sysstr(method)
4998 req.get_method = lambda: pycompat.sysstr(method)
4999
4999
5000 try:
5000 try:
5001 res = opener.open(req)
5001 res = opener.open(req)
5002 body = res.read()
5002 body = res.read()
5003 except util.urlerr.urlerror as e:
5003 except util.urlerr.urlerror as e:
5004 # read() method must be called, but only exists in Python 2
5004 # read() method must be called, but only exists in Python 2
5005 getattr(e, 'read', lambda: None)()
5005 getattr(e, 'read', lambda: None)()
5006 continue
5006 continue
5007
5007
5008 ct = res.headers.get('Content-Type')
5008 ct = res.headers.get('Content-Type')
5009 if ct == 'application/mercurial-cbor':
5009 if ct == 'application/mercurial-cbor':
5010 ui.write(
5010 ui.write(
5011 _(b'cbor> %s\n')
5011 _(b'cbor> %s\n')
5012 % stringutil.pprint(
5012 % stringutil.pprint(
5013 cborutil.decodeall(body), bprefix=True, indent=2
5013 cborutil.decodeall(body), bprefix=True, indent=2
5014 )
5014 )
5015 )
5015 )
5016
5016
5017 elif action == b'close':
5017 elif action == b'close':
5018 assert peer is not None
5018 assert peer is not None
5019 peer.close()
5019 peer.close()
5020 elif action == b'readavailable':
5020 elif action == b'readavailable':
5021 if not stdout or not stderr:
5021 if not stdout or not stderr:
5022 raise error.Abort(
5022 raise error.Abort(
5023 _(b'readavailable not available on this peer')
5023 _(b'readavailable not available on this peer')
5024 )
5024 )
5025
5025
5026 stdin.close()
5026 stdin.close()
5027 stdout.read()
5027 stdout.read()
5028 stderr.read()
5028 stderr.read()
5029
5029
5030 elif action == b'readline':
5030 elif action == b'readline':
5031 if not stdout:
5031 if not stdout:
5032 raise error.Abort(_(b'readline not available on this peer'))
5032 raise error.Abort(_(b'readline not available on this peer'))
5033 stdout.readline()
5033 stdout.readline()
5034 elif action == b'ereadline':
5034 elif action == b'ereadline':
5035 if not stderr:
5035 if not stderr:
5036 raise error.Abort(_(b'ereadline not available on this peer'))
5036 raise error.Abort(_(b'ereadline not available on this peer'))
5037 stderr.readline()
5037 stderr.readline()
5038 elif action.startswith(b'read '):
5038 elif action.startswith(b'read '):
5039 count = int(action.split(b' ', 1)[1])
5039 count = int(action.split(b' ', 1)[1])
5040 if not stdout:
5040 if not stdout:
5041 raise error.Abort(_(b'read not available on this peer'))
5041 raise error.Abort(_(b'read not available on this peer'))
5042 stdout.read(count)
5042 stdout.read(count)
5043 elif action.startswith(b'eread '):
5043 elif action.startswith(b'eread '):
5044 count = int(action.split(b' ', 1)[1])
5044 count = int(action.split(b' ', 1)[1])
5045 if not stderr:
5045 if not stderr:
5046 raise error.Abort(_(b'eread not available on this peer'))
5046 raise error.Abort(_(b'eread not available on this peer'))
5047 stderr.read(count)
5047 stderr.read(count)
5048 else:
5048 else:
5049 raise error.Abort(_(b'unknown action: %s') % action)
5049 raise error.Abort(_(b'unknown action: %s') % action)
5050
5050
5051 if batchedcommands is not None:
5051 if batchedcommands is not None:
5052 raise error.Abort(_(b'unclosed "batchbegin" request'))
5052 raise error.Abort(_(b'unclosed "batchbegin" request'))
5053
5053
5054 if peer:
5054 if peer:
5055 peer.close()
5055 peer.close()
5056
5056
5057 if proc:
5057 if proc:
5058 proc.kill()
5058 proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now