##// END OF EJS Templates
delta-chain: move the debugdeltachain command in revlogutils...
marmoute -
r51963:d7f975e4 default
parent child Browse files
Show More
@@ -1,4836 +1,4657 b''
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import binascii
9 import binascii
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import contextlib
12 import contextlib
13 import difflib
13 import difflib
14 import errno
14 import errno
15 import glob
15 import glob
16 import operator
16 import operator
17 import os
17 import os
18 import platform
18 import platform
19 import random
19 import random
20 import re
20 import re
21 import socket
21 import socket
22 import ssl
22 import ssl
23 import stat
23 import stat
24 import subprocess
24 import subprocess
25 import sys
25 import sys
26 import time
26 import time
27
27
28 from .i18n import _
28 from .i18n import _
29 from .node import (
29 from .node import (
30 bin,
30 bin,
31 hex,
31 hex,
32 nullrev,
32 nullrev,
33 short,
33 short,
34 )
34 )
35 from .pycompat import (
35 from .pycompat import (
36 open,
36 open,
37 )
37 )
38 from . import (
38 from . import (
39 bundle2,
39 bundle2,
40 bundlerepo,
40 bundlerepo,
41 changegroup,
41 changegroup,
42 cmdutil,
42 cmdutil,
43 color,
43 color,
44 context,
44 context,
45 copies,
45 copies,
46 dagparser,
46 dagparser,
47 dirstateutils,
47 dirstateutils,
48 encoding,
48 encoding,
49 error,
49 error,
50 exchange,
50 exchange,
51 extensions,
51 extensions,
52 filelog,
52 filelog,
53 filemerge,
53 filemerge,
54 filesetlang,
54 filesetlang,
55 formatter,
55 formatter,
56 hg,
56 hg,
57 httppeer,
57 httppeer,
58 localrepo,
58 localrepo,
59 lock as lockmod,
59 lock as lockmod,
60 logcmdutil,
60 logcmdutil,
61 manifest,
61 manifest,
62 mergestate as mergestatemod,
62 mergestate as mergestatemod,
63 metadata,
63 metadata,
64 obsolete,
64 obsolete,
65 obsutil,
65 obsutil,
66 pathutil,
66 pathutil,
67 phases,
67 phases,
68 policy,
68 policy,
69 pvec,
69 pvec,
70 pycompat,
70 pycompat,
71 registrar,
71 registrar,
72 repair,
72 repair,
73 repoview,
73 repoview,
74 requirements,
74 requirements,
75 revlog,
75 revlog,
76 revset,
76 revset,
77 revsetlang,
77 revsetlang,
78 scmutil,
78 scmutil,
79 setdiscovery,
79 setdiscovery,
80 simplemerge,
80 simplemerge,
81 sshpeer,
81 sshpeer,
82 sslutil,
82 sslutil,
83 streamclone,
83 streamclone,
84 strip,
84 strip,
85 tags as tagsmod,
85 tags as tagsmod,
86 templater,
86 templater,
87 treediscovery,
87 treediscovery,
88 upgrade,
88 upgrade,
89 url as urlmod,
89 url as urlmod,
90 util,
90 util,
91 verify,
91 verify,
92 vfs as vfsmod,
92 vfs as vfsmod,
93 wireprotoframing,
93 wireprotoframing,
94 wireprotoserver,
94 wireprotoserver,
95 )
95 )
96 from .interfaces import repository
96 from .interfaces import repository
97 from .stabletailgraph import stabletailsort
97 from .stabletailgraph import stabletailsort
98 from .utils import (
98 from .utils import (
99 cborutil,
99 cborutil,
100 compression,
100 compression,
101 dateutil,
101 dateutil,
102 procutil,
102 procutil,
103 stringutil,
103 stringutil,
104 urlutil,
104 urlutil,
105 )
105 )
106
106
107 from .revlogutils import (
107 from .revlogutils import (
108 constants as revlog_constants,
109 debug as revlog_debug,
108 debug as revlog_debug,
110 deltas as deltautil,
111 nodemap,
109 nodemap,
112 rewrite,
110 rewrite,
113 sidedata,
111 sidedata,
114 )
112 )
115
113
116 release = lockmod.release
114 release = lockmod.release
117
115
118 table = {}
116 table = {}
119 table.update(strip.command._table)
117 table.update(strip.command._table)
120 command = registrar.command(table)
118 command = registrar.command(table)
121
119
122
120
123 @command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
121 @command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
124 def debugancestor(ui, repo, *args):
122 def debugancestor(ui, repo, *args):
125 """find the ancestor revision of two revisions in a given index"""
123 """find the ancestor revision of two revisions in a given index"""
126 if len(args) == 3:
124 if len(args) == 3:
127 index, rev1, rev2 = args
125 index, rev1, rev2 = args
128 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
126 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
129 lookup = r.lookup
127 lookup = r.lookup
130 elif len(args) == 2:
128 elif len(args) == 2:
131 if not repo:
129 if not repo:
132 raise error.Abort(
130 raise error.Abort(
133 _(b'there is no Mercurial repository here (.hg not found)')
131 _(b'there is no Mercurial repository here (.hg not found)')
134 )
132 )
135 rev1, rev2 = args
133 rev1, rev2 = args
136 r = repo.changelog
134 r = repo.changelog
137 lookup = repo.lookup
135 lookup = repo.lookup
138 else:
136 else:
139 raise error.Abort(_(b'either two or three arguments required'))
137 raise error.Abort(_(b'either two or three arguments required'))
140 a = r.ancestor(lookup(rev1), lookup(rev2))
138 a = r.ancestor(lookup(rev1), lookup(rev2))
141 ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
139 ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
142
140
143
141
144 @command(b'debugantivirusrunning', [])
142 @command(b'debugantivirusrunning', [])
145 def debugantivirusrunning(ui, repo):
143 def debugantivirusrunning(ui, repo):
146 """attempt to trigger an antivirus scanner to see if one is active"""
144 """attempt to trigger an antivirus scanner to see if one is active"""
147 with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
145 with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
148 f.write(
146 f.write(
149 util.b85decode(
147 util.b85decode(
150 # This is a base85-armored version of the EICAR test file. See
148 # This is a base85-armored version of the EICAR test file. See
151 # https://en.wikipedia.org/wiki/EICAR_test_file for details.
149 # https://en.wikipedia.org/wiki/EICAR_test_file for details.
152 b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
150 b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
153 b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
151 b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
154 )
152 )
155 )
153 )
156 # Give an AV engine time to scan the file.
154 # Give an AV engine time to scan the file.
157 time.sleep(2)
155 time.sleep(2)
158 util.unlink(repo.cachevfs.join('eicar-test-file.com'))
156 util.unlink(repo.cachevfs.join('eicar-test-file.com'))
159
157
160
158
161 @command(b'debugapplystreamclonebundle', [], b'FILE')
159 @command(b'debugapplystreamclonebundle', [], b'FILE')
162 def debugapplystreamclonebundle(ui, repo, fname):
160 def debugapplystreamclonebundle(ui, repo, fname):
163 """apply a stream clone bundle file"""
161 """apply a stream clone bundle file"""
164 f = hg.openpath(ui, fname)
162 f = hg.openpath(ui, fname)
165 gen = exchange.readbundle(ui, f, fname)
163 gen = exchange.readbundle(ui, f, fname)
166 gen.apply(repo)
164 gen.apply(repo)
167
165
168
166
169 @command(
167 @command(
170 b'debugbuilddag',
168 b'debugbuilddag',
171 [
169 [
172 (
170 (
173 b'm',
171 b'm',
174 b'mergeable-file',
172 b'mergeable-file',
175 None,
173 None,
176 _(b'add single file mergeable changes'),
174 _(b'add single file mergeable changes'),
177 ),
175 ),
178 (
176 (
179 b'o',
177 b'o',
180 b'overwritten-file',
178 b'overwritten-file',
181 None,
179 None,
182 _(b'add single file all revs overwrite'),
180 _(b'add single file all revs overwrite'),
183 ),
181 ),
184 (b'n', b'new-file', None, _(b'add new file at each rev')),
182 (b'n', b'new-file', None, _(b'add new file at each rev')),
185 (
183 (
186 b'',
184 b'',
187 b'from-existing',
185 b'from-existing',
188 None,
186 None,
189 _(b'continue from a non-empty repository'),
187 _(b'continue from a non-empty repository'),
190 ),
188 ),
191 ],
189 ],
192 _(b'[OPTION]... [TEXT]'),
190 _(b'[OPTION]... [TEXT]'),
193 )
191 )
194 def debugbuilddag(
192 def debugbuilddag(
195 ui,
193 ui,
196 repo,
194 repo,
197 text=None,
195 text=None,
198 mergeable_file=False,
196 mergeable_file=False,
199 overwritten_file=False,
197 overwritten_file=False,
200 new_file=False,
198 new_file=False,
201 from_existing=False,
199 from_existing=False,
202 ):
200 ):
203 """builds a repo with a given DAG from scratch in the current empty repo
201 """builds a repo with a given DAG from scratch in the current empty repo
204
202
205 The description of the DAG is read from stdin if not given on the
203 The description of the DAG is read from stdin if not given on the
206 command line.
204 command line.
207
205
208 Elements:
206 Elements:
209
207
210 - "+n" is a linear run of n nodes based on the current default parent
208 - "+n" is a linear run of n nodes based on the current default parent
211 - "." is a single node based on the current default parent
209 - "." is a single node based on the current default parent
212 - "$" resets the default parent to null (implied at the start);
210 - "$" resets the default parent to null (implied at the start);
213 otherwise the default parent is always the last node created
211 otherwise the default parent is always the last node created
214 - "<p" sets the default parent to the backref p
212 - "<p" sets the default parent to the backref p
215 - "*p" is a fork at parent p, which is a backref
213 - "*p" is a fork at parent p, which is a backref
216 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
214 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
217 - "/p2" is a merge of the preceding node and p2
215 - "/p2" is a merge of the preceding node and p2
218 - ":tag" defines a local tag for the preceding node
216 - ":tag" defines a local tag for the preceding node
219 - "@branch" sets the named branch for subsequent nodes
217 - "@branch" sets the named branch for subsequent nodes
220 - "#...\\n" is a comment up to the end of the line
218 - "#...\\n" is a comment up to the end of the line
221
219
222 Whitespace between the above elements is ignored.
220 Whitespace between the above elements is ignored.
223
221
224 A backref is either
222 A backref is either
225
223
226 - a number n, which references the node curr-n, where curr is the current
224 - a number n, which references the node curr-n, where curr is the current
227 node, or
225 node, or
228 - the name of a local tag you placed earlier using ":tag", or
226 - the name of a local tag you placed earlier using ":tag", or
229 - empty to denote the default parent.
227 - empty to denote the default parent.
230
228
231 All string valued-elements are either strictly alphanumeric, or must
229 All string valued-elements are either strictly alphanumeric, or must
232 be enclosed in double quotes ("..."), with "\\" as escape character.
230 be enclosed in double quotes ("..."), with "\\" as escape character.
233 """
231 """
234
232
235 if text is None:
233 if text is None:
236 ui.status(_(b"reading DAG from stdin\n"))
234 ui.status(_(b"reading DAG from stdin\n"))
237 text = ui.fin.read()
235 text = ui.fin.read()
238
236
239 cl = repo.changelog
237 cl = repo.changelog
240 if len(cl) > 0 and not from_existing:
238 if len(cl) > 0 and not from_existing:
241 raise error.Abort(_(b'repository is not empty'))
239 raise error.Abort(_(b'repository is not empty'))
242
240
243 # determine number of revs in DAG
241 # determine number of revs in DAG
244 total = 0
242 total = 0
245 for type, data in dagparser.parsedag(text):
243 for type, data in dagparser.parsedag(text):
246 if type == b'n':
244 if type == b'n':
247 total += 1
245 total += 1
248
246
249 if mergeable_file:
247 if mergeable_file:
250 linesperrev = 2
248 linesperrev = 2
251 # make a file with k lines per rev
249 # make a file with k lines per rev
252 initialmergedlines = [b'%d' % i for i in range(0, total * linesperrev)]
250 initialmergedlines = [b'%d' % i for i in range(0, total * linesperrev)]
253 initialmergedlines.append(b"")
251 initialmergedlines.append(b"")
254
252
255 tags = []
253 tags = []
256 progress = ui.makeprogress(
254 progress = ui.makeprogress(
257 _(b'building'), unit=_(b'revisions'), total=total
255 _(b'building'), unit=_(b'revisions'), total=total
258 )
256 )
259 with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
257 with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
260 at = -1
258 at = -1
261 atbranch = b'default'
259 atbranch = b'default'
262 nodeids = []
260 nodeids = []
263 id = 0
261 id = 0
264 progress.update(id)
262 progress.update(id)
265 for type, data in dagparser.parsedag(text):
263 for type, data in dagparser.parsedag(text):
266 if type == b'n':
264 if type == b'n':
267 ui.note((b'node %s\n' % pycompat.bytestr(data)))
265 ui.note((b'node %s\n' % pycompat.bytestr(data)))
268 id, ps = data
266 id, ps = data
269
267
270 files = []
268 files = []
271 filecontent = {}
269 filecontent = {}
272
270
273 p2 = None
271 p2 = None
274 if mergeable_file:
272 if mergeable_file:
275 fn = b"mf"
273 fn = b"mf"
276 p1 = repo[ps[0]]
274 p1 = repo[ps[0]]
277 if len(ps) > 1:
275 if len(ps) > 1:
278 p2 = repo[ps[1]]
276 p2 = repo[ps[1]]
279 pa = p1.ancestor(p2)
277 pa = p1.ancestor(p2)
280 base, local, other = [
278 base, local, other = [
281 x[fn].data() for x in (pa, p1, p2)
279 x[fn].data() for x in (pa, p1, p2)
282 ]
280 ]
283 m3 = simplemerge.Merge3Text(base, local, other)
281 m3 = simplemerge.Merge3Text(base, local, other)
284 ml = [
282 ml = [
285 l.strip()
283 l.strip()
286 for l in simplemerge.render_minimized(m3)[0]
284 for l in simplemerge.render_minimized(m3)[0]
287 ]
285 ]
288 ml.append(b"")
286 ml.append(b"")
289 elif at > 0:
287 elif at > 0:
290 ml = p1[fn].data().split(b"\n")
288 ml = p1[fn].data().split(b"\n")
291 else:
289 else:
292 ml = initialmergedlines
290 ml = initialmergedlines
293 ml[id * linesperrev] += b" r%i" % id
291 ml[id * linesperrev] += b" r%i" % id
294 mergedtext = b"\n".join(ml)
292 mergedtext = b"\n".join(ml)
295 files.append(fn)
293 files.append(fn)
296 filecontent[fn] = mergedtext
294 filecontent[fn] = mergedtext
297
295
298 if overwritten_file:
296 if overwritten_file:
299 fn = b"of"
297 fn = b"of"
300 files.append(fn)
298 files.append(fn)
301 filecontent[fn] = b"r%i\n" % id
299 filecontent[fn] = b"r%i\n" % id
302
300
303 if new_file:
301 if new_file:
304 fn = b"nf%i" % id
302 fn = b"nf%i" % id
305 files.append(fn)
303 files.append(fn)
306 filecontent[fn] = b"r%i\n" % id
304 filecontent[fn] = b"r%i\n" % id
307 if len(ps) > 1:
305 if len(ps) > 1:
308 if not p2:
306 if not p2:
309 p2 = repo[ps[1]]
307 p2 = repo[ps[1]]
310 for fn in p2:
308 for fn in p2:
311 if fn.startswith(b"nf"):
309 if fn.startswith(b"nf"):
312 files.append(fn)
310 files.append(fn)
313 filecontent[fn] = p2[fn].data()
311 filecontent[fn] = p2[fn].data()
314
312
315 def fctxfn(repo, cx, path):
313 def fctxfn(repo, cx, path):
316 if path in filecontent:
314 if path in filecontent:
317 return context.memfilectx(
315 return context.memfilectx(
318 repo, cx, path, filecontent[path]
316 repo, cx, path, filecontent[path]
319 )
317 )
320 return None
318 return None
321
319
322 if len(ps) == 0 or ps[0] < 0:
320 if len(ps) == 0 or ps[0] < 0:
323 pars = [None, None]
321 pars = [None, None]
324 elif len(ps) == 1:
322 elif len(ps) == 1:
325 pars = [nodeids[ps[0]], None]
323 pars = [nodeids[ps[0]], None]
326 else:
324 else:
327 pars = [nodeids[p] for p in ps]
325 pars = [nodeids[p] for p in ps]
328 cx = context.memctx(
326 cx = context.memctx(
329 repo,
327 repo,
330 pars,
328 pars,
331 b"r%i" % id,
329 b"r%i" % id,
332 files,
330 files,
333 fctxfn,
331 fctxfn,
334 date=(id, 0),
332 date=(id, 0),
335 user=b"debugbuilddag",
333 user=b"debugbuilddag",
336 extra={b'branch': atbranch},
334 extra={b'branch': atbranch},
337 )
335 )
338 nodeid = repo.commitctx(cx)
336 nodeid = repo.commitctx(cx)
339 nodeids.append(nodeid)
337 nodeids.append(nodeid)
340 at = id
338 at = id
341 elif type == b'l':
339 elif type == b'l':
342 id, name = data
340 id, name = data
343 ui.note((b'tag %s\n' % name))
341 ui.note((b'tag %s\n' % name))
344 tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
342 tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
345 elif type == b'a':
343 elif type == b'a':
346 ui.note((b'branch %s\n' % data))
344 ui.note((b'branch %s\n' % data))
347 atbranch = data
345 atbranch = data
348 progress.update(id)
346 progress.update(id)
349
347
350 if tags:
348 if tags:
351 repo.vfs.write(b"localtags", b"".join(tags))
349 repo.vfs.write(b"localtags", b"".join(tags))
352
350
353
351
354 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
352 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
355 indent_string = b' ' * indent
353 indent_string = b' ' * indent
356 if all:
354 if all:
357 ui.writenoi18n(
355 ui.writenoi18n(
358 b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
356 b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
359 % indent_string
357 % indent_string
360 )
358 )
361
359
362 def showchunks(named):
360 def showchunks(named):
363 ui.write(b"\n%s%s\n" % (indent_string, named))
361 ui.write(b"\n%s%s\n" % (indent_string, named))
364 for deltadata in gen.deltaiter():
362 for deltadata in gen.deltaiter():
365 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
363 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
366 ui.write(
364 ui.write(
367 b"%s%s %s %s %s %s %d\n"
365 b"%s%s %s %s %s %s %d\n"
368 % (
366 % (
369 indent_string,
367 indent_string,
370 hex(node),
368 hex(node),
371 hex(p1),
369 hex(p1),
372 hex(p2),
370 hex(p2),
373 hex(cs),
371 hex(cs),
374 hex(deltabase),
372 hex(deltabase),
375 len(delta),
373 len(delta),
376 )
374 )
377 )
375 )
378
376
379 gen.changelogheader()
377 gen.changelogheader()
380 showchunks(b"changelog")
378 showchunks(b"changelog")
381 gen.manifestheader()
379 gen.manifestheader()
382 showchunks(b"manifest")
380 showchunks(b"manifest")
383 for chunkdata in iter(gen.filelogheader, {}):
381 for chunkdata in iter(gen.filelogheader, {}):
384 fname = chunkdata[b'filename']
382 fname = chunkdata[b'filename']
385 showchunks(fname)
383 showchunks(fname)
386 else:
384 else:
387 if isinstance(gen, bundle2.unbundle20):
385 if isinstance(gen, bundle2.unbundle20):
388 raise error.Abort(_(b'use debugbundle2 for this file'))
386 raise error.Abort(_(b'use debugbundle2 for this file'))
389 gen.changelogheader()
387 gen.changelogheader()
390 for deltadata in gen.deltaiter():
388 for deltadata in gen.deltaiter():
391 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
389 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
392 ui.write(b"%s%s\n" % (indent_string, hex(node)))
390 ui.write(b"%s%s\n" % (indent_string, hex(node)))
393
391
394
392
395 def _debugobsmarkers(ui, part, indent=0, **opts):
393 def _debugobsmarkers(ui, part, indent=0, **opts):
396 """display version and markers contained in 'data'"""
394 """display version and markers contained in 'data'"""
397 data = part.read()
395 data = part.read()
398 indent_string = b' ' * indent
396 indent_string = b' ' * indent
399 try:
397 try:
400 version, markers = obsolete._readmarkers(data)
398 version, markers = obsolete._readmarkers(data)
401 except error.UnknownVersion as exc:
399 except error.UnknownVersion as exc:
402 msg = b"%sunsupported version: %s (%d bytes)\n"
400 msg = b"%sunsupported version: %s (%d bytes)\n"
403 msg %= indent_string, exc.version, len(data)
401 msg %= indent_string, exc.version, len(data)
404 ui.write(msg)
402 ui.write(msg)
405 else:
403 else:
406 msg = b"%sversion: %d (%d bytes)\n"
404 msg = b"%sversion: %d (%d bytes)\n"
407 msg %= indent_string, version, len(data)
405 msg %= indent_string, version, len(data)
408 ui.write(msg)
406 ui.write(msg)
409 fm = ui.formatter(b'debugobsolete', pycompat.byteskwargs(opts))
407 fm = ui.formatter(b'debugobsolete', pycompat.byteskwargs(opts))
410 for rawmarker in sorted(markers):
408 for rawmarker in sorted(markers):
411 m = obsutil.marker(None, rawmarker)
409 m = obsutil.marker(None, rawmarker)
412 fm.startitem()
410 fm.startitem()
413 fm.plain(indent_string)
411 fm.plain(indent_string)
414 cmdutil.showmarker(fm, m)
412 cmdutil.showmarker(fm, m)
415 fm.end()
413 fm.end()
416
414
417
415
418 def _debugphaseheads(ui, data, indent=0):
416 def _debugphaseheads(ui, data, indent=0):
419 """display version and markers contained in 'data'"""
417 """display version and markers contained in 'data'"""
420 indent_string = b' ' * indent
418 indent_string = b' ' * indent
421 headsbyphase = phases.binarydecode(data)
419 headsbyphase = phases.binarydecode(data)
422 for phase in phases.allphases:
420 for phase in phases.allphases:
423 for head in headsbyphase[phase]:
421 for head in headsbyphase[phase]:
424 ui.write(indent_string)
422 ui.write(indent_string)
425 ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
423 ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
426
424
427
425
428 def _quasirepr(thing):
426 def _quasirepr(thing):
429 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
427 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
430 return b'{%s}' % (
428 return b'{%s}' % (
431 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
429 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
432 )
430 )
433 return pycompat.bytestr(repr(thing))
431 return pycompat.bytestr(repr(thing))
434
432
435
433
436 def _debugbundle2(ui, gen, all=None, **opts):
434 def _debugbundle2(ui, gen, all=None, **opts):
437 """lists the contents of a bundle2"""
435 """lists the contents of a bundle2"""
438 if not isinstance(gen, bundle2.unbundle20):
436 if not isinstance(gen, bundle2.unbundle20):
439 raise error.Abort(_(b'not a bundle2 file'))
437 raise error.Abort(_(b'not a bundle2 file'))
440 ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
438 ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
441 parttypes = opts.get('part_type', [])
439 parttypes = opts.get('part_type', [])
442 for part in gen.iterparts():
440 for part in gen.iterparts():
443 if parttypes and part.type not in parttypes:
441 if parttypes and part.type not in parttypes:
444 continue
442 continue
445 msg = b'%s -- %s (mandatory: %r)\n'
443 msg = b'%s -- %s (mandatory: %r)\n'
446 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
444 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
447 if part.type == b'changegroup':
445 if part.type == b'changegroup':
448 version = part.params.get(b'version', b'01')
446 version = part.params.get(b'version', b'01')
449 cg = changegroup.getunbundler(version, part, b'UN')
447 cg = changegroup.getunbundler(version, part, b'UN')
450 if not ui.quiet:
448 if not ui.quiet:
451 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
449 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
452 if part.type == b'obsmarkers':
450 if part.type == b'obsmarkers':
453 if not ui.quiet:
451 if not ui.quiet:
454 _debugobsmarkers(ui, part, indent=4, **opts)
452 _debugobsmarkers(ui, part, indent=4, **opts)
455 if part.type == b'phase-heads':
453 if part.type == b'phase-heads':
456 if not ui.quiet:
454 if not ui.quiet:
457 _debugphaseheads(ui, part, indent=4)
455 _debugphaseheads(ui, part, indent=4)
458
456
459
457
460 @command(
458 @command(
461 b'debugbundle',
459 b'debugbundle',
462 [
460 [
463 (b'a', b'all', None, _(b'show all details')),
461 (b'a', b'all', None, _(b'show all details')),
464 (b'', b'part-type', [], _(b'show only the named part type')),
462 (b'', b'part-type', [], _(b'show only the named part type')),
465 (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
463 (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
466 ],
464 ],
467 _(b'FILE'),
465 _(b'FILE'),
468 norepo=True,
466 norepo=True,
469 )
467 )
470 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
468 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
471 """lists the contents of a bundle"""
469 """lists the contents of a bundle"""
472 with hg.openpath(ui, bundlepath) as f:
470 with hg.openpath(ui, bundlepath) as f:
473 if spec:
471 if spec:
474 spec = exchange.getbundlespec(ui, f)
472 spec = exchange.getbundlespec(ui, f)
475 ui.write(b'%s\n' % spec)
473 ui.write(b'%s\n' % spec)
476 return
474 return
477
475
478 gen = exchange.readbundle(ui, f, bundlepath)
476 gen = exchange.readbundle(ui, f, bundlepath)
479 if isinstance(gen, bundle2.unbundle20):
477 if isinstance(gen, bundle2.unbundle20):
480 return _debugbundle2(ui, gen, all=all, **opts)
478 return _debugbundle2(ui, gen, all=all, **opts)
481 _debugchangegroup(ui, gen, all=all, **opts)
479 _debugchangegroup(ui, gen, all=all, **opts)
482
480
483
481
484 @command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
482 @command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
485 def debugcapabilities(ui, path, **opts):
483 def debugcapabilities(ui, path, **opts):
486 """lists the capabilities of a remote peer"""
484 """lists the capabilities of a remote peer"""
487 peer = hg.peer(ui, pycompat.byteskwargs(opts), path)
485 peer = hg.peer(ui, pycompat.byteskwargs(opts), path)
488 try:
486 try:
489 caps = peer.capabilities()
487 caps = peer.capabilities()
490 ui.writenoi18n(b'Main capabilities:\n')
488 ui.writenoi18n(b'Main capabilities:\n')
491 for c in sorted(caps):
489 for c in sorted(caps):
492 ui.write(b' %s\n' % c)
490 ui.write(b' %s\n' % c)
493 b2caps = bundle2.bundle2caps(peer)
491 b2caps = bundle2.bundle2caps(peer)
494 if b2caps:
492 if b2caps:
495 ui.writenoi18n(b'Bundle2 capabilities:\n')
493 ui.writenoi18n(b'Bundle2 capabilities:\n')
496 for key, values in sorted(b2caps.items()):
494 for key, values in sorted(b2caps.items()):
497 ui.write(b' %s\n' % key)
495 ui.write(b' %s\n' % key)
498 for v in values:
496 for v in values:
499 ui.write(b' %s\n' % v)
497 ui.write(b' %s\n' % v)
500 finally:
498 finally:
501 peer.close()
499 peer.close()
502
500
503
501
504 @command(
502 @command(
505 b'debugchangedfiles',
503 b'debugchangedfiles',
506 [
504 [
507 (
505 (
508 b'',
506 b'',
509 b'compute',
507 b'compute',
510 False,
508 False,
511 b"compute information instead of reading it from storage",
509 b"compute information instead of reading it from storage",
512 ),
510 ),
513 ],
511 ],
514 b'REV',
512 b'REV',
515 )
513 )
516 def debugchangedfiles(ui, repo, rev, **opts):
514 def debugchangedfiles(ui, repo, rev, **opts):
517 """list the stored files changes for a revision"""
515 """list the stored files changes for a revision"""
518 ctx = logcmdutil.revsingle(repo, rev, None)
516 ctx = logcmdutil.revsingle(repo, rev, None)
519 files = None
517 files = None
520
518
521 if opts['compute']:
519 if opts['compute']:
522 files = metadata.compute_all_files_changes(ctx)
520 files = metadata.compute_all_files_changes(ctx)
523 else:
521 else:
524 sd = repo.changelog.sidedata(ctx.rev())
522 sd = repo.changelog.sidedata(ctx.rev())
525 files_block = sd.get(sidedata.SD_FILES)
523 files_block = sd.get(sidedata.SD_FILES)
526 if files_block is not None:
524 if files_block is not None:
527 files = metadata.decode_files_sidedata(sd)
525 files = metadata.decode_files_sidedata(sd)
528 if files is not None:
526 if files is not None:
529 for f in sorted(files.touched):
527 for f in sorted(files.touched):
530 if f in files.added:
528 if f in files.added:
531 action = b"added"
529 action = b"added"
532 elif f in files.removed:
530 elif f in files.removed:
533 action = b"removed"
531 action = b"removed"
534 elif f in files.merged:
532 elif f in files.merged:
535 action = b"merged"
533 action = b"merged"
536 elif f in files.salvaged:
534 elif f in files.salvaged:
537 action = b"salvaged"
535 action = b"salvaged"
538 else:
536 else:
539 action = b"touched"
537 action = b"touched"
540
538
541 copy_parent = b""
539 copy_parent = b""
542 copy_source = b""
540 copy_source = b""
543 if f in files.copied_from_p1:
541 if f in files.copied_from_p1:
544 copy_parent = b"p1"
542 copy_parent = b"p1"
545 copy_source = files.copied_from_p1[f]
543 copy_source = files.copied_from_p1[f]
546 elif f in files.copied_from_p2:
544 elif f in files.copied_from_p2:
547 copy_parent = b"p2"
545 copy_parent = b"p2"
548 copy_source = files.copied_from_p2[f]
546 copy_source = files.copied_from_p2[f]
549
547
550 data = (action, copy_parent, f, copy_source)
548 data = (action, copy_parent, f, copy_source)
551 template = b"%-8s %2s: %s, %s;\n"
549 template = b"%-8s %2s: %s, %s;\n"
552 ui.write(template % data)
550 ui.write(template % data)
553
551
554
552
555 @command(b'debugcheckstate', [], b'')
553 @command(b'debugcheckstate', [], b'')
556 def debugcheckstate(ui, repo):
554 def debugcheckstate(ui, repo):
557 """validate the correctness of the current dirstate"""
555 """validate the correctness of the current dirstate"""
558 errors = verify.verifier(repo)._verify_dirstate()
556 errors = verify.verifier(repo)._verify_dirstate()
559 if errors:
557 if errors:
560 errstr = _(b"dirstate inconsistent with current parent's manifest")
558 errstr = _(b"dirstate inconsistent with current parent's manifest")
561 raise error.Abort(errstr)
559 raise error.Abort(errstr)
562
560
563
561
564 @command(
562 @command(
565 b'debugcolor',
563 b'debugcolor',
566 [(b'', b'style', None, _(b'show all configured styles'))],
564 [(b'', b'style', None, _(b'show all configured styles'))],
567 b'hg debugcolor',
565 b'hg debugcolor',
568 )
566 )
569 def debugcolor(ui, repo, **opts):
567 def debugcolor(ui, repo, **opts):
570 """show available color, effects or style"""
568 """show available color, effects or style"""
571 ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
569 ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
572 if opts.get('style'):
570 if opts.get('style'):
573 return _debugdisplaystyle(ui)
571 return _debugdisplaystyle(ui)
574 else:
572 else:
575 return _debugdisplaycolor(ui)
573 return _debugdisplaycolor(ui)
576
574
577
575
578 def _debugdisplaycolor(ui):
576 def _debugdisplaycolor(ui):
579 ui = ui.copy()
577 ui = ui.copy()
580 ui._styles.clear()
578 ui._styles.clear()
581 for effect in color._activeeffects(ui).keys():
579 for effect in color._activeeffects(ui).keys():
582 ui._styles[effect] = effect
580 ui._styles[effect] = effect
583 if ui._terminfoparams:
581 if ui._terminfoparams:
584 for k, v in ui.configitems(b'color'):
582 for k, v in ui.configitems(b'color'):
585 if k.startswith(b'color.'):
583 if k.startswith(b'color.'):
586 ui._styles[k] = k[6:]
584 ui._styles[k] = k[6:]
587 elif k.startswith(b'terminfo.'):
585 elif k.startswith(b'terminfo.'):
588 ui._styles[k] = k[9:]
586 ui._styles[k] = k[9:]
589 ui.write(_(b'available colors:\n'))
587 ui.write(_(b'available colors:\n'))
590 # sort label with a '_' after the other to group '_background' entry.
588 # sort label with a '_' after the other to group '_background' entry.
591 items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
589 items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
592 for colorname, label in items:
590 for colorname, label in items:
593 ui.write(b'%s\n' % colorname, label=label)
591 ui.write(b'%s\n' % colorname, label=label)
594
592
595
593
def _debugdisplaystyle(ui):
    """List the configured style labels and the effects each one maps to."""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # column width so every effect list starts at the same offset
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            # 50
            ui.write(b': ')
            ui.write(b' ' * (max(0, width - len(label))))
            ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
        ui.write(b'\n')
609
607
610
608
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.

    This command creates a "version 1" stream clone, which is deprecated in
    favor of newer versions of the stream protocol. Bundles using such newer
    versions can be generated using the `hg bundle` command.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
636
634
637
635
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # standalone revlog index: emit its DAG, labeling requested revs
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        # changelog DAG, optionally annotated with branches and tags
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
    ui.write(b"\n")
707
705
708
706
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
        # -c/-m/--dir select the revlog themselves, so the single positional
        # argument is the revision, not a file
        if rev is not None:
            raise error.InputError(
                _(b'cannot specify a revision with other arguments')
            )
        file_, rev = None, file_
    elif rev is None:
        raise error.InputError(_(b'please specify a revision'))
    r = cmdutil.openstorage(
        repo, b'debugdata', file_, pycompat.byteskwargs(opts)
    )
    try:
        ui.write(r.rawdata(r.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
727
725
728
726
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        d = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        d = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % d)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
    if range:
        m = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % m(d[0]))
747
745
748
746
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``p1``: parent 1 revision number (for reference)
    :``p2``: parent 2 revision number (for reference)
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
                    - base: a full snapshot
                    - snap: an intermediate snapshot
                    - p1: a delta against the first parent
                    - p2: a delta against the second parent
                    - skip1: a delta against the same base as p1
                             (when p1 has empty delta
                    - skip2: a delta against the same base as p2
                             (when p2 has empty delta
                    - prev: a delta against the previous revision
                    - other: a delta against an arbitrary revision
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    revlog = cmdutil.openrevlog(
        repo, b'debugdeltachain', file_, pycompat.byteskwargs(opts)
    )
    fm = ui.formatter(b'debugdeltachain', pycompat.byteskwargs(opts))

    # the heavy lifting lives in revlogutils.debug; it yields the header
    # first, then one entry per revision as a list of
    # (label, format, template-key, value) tuples
    lines = revlog_debug.debug_delta_chain(revlog)
    # first entry is the header
    header = next(lines)
    fm.plain(header)
    for entry in lines:
        label = b' '.join(e[0] for e in entry)
        format = b' '.join(e[1] for e in entry)
        values = [e[3] for e in entry]
        data = dict((e[2], e[3]) for e in entry)
        fm.startitem()
        fm.write(label, format, *values, **data)
        fm.plain(b'\n')
    fm.end()
997
818
998
819
@command(
    b'debug-delta-find',
    cmdutil.debugrevlogopts
    + cmdutil.formatteropts
    + [
        (
            b'',
            b'source',
            b'full',
            _(b'input data feed to the process (full, storage, p1, p2, prev)'),
        ),
    ],
    _(b'-c|-m|FILE REV'),
    optionalrepo=True,
)
def debugdeltafind(ui, repo, arg_1, arg_2=None, source=b'full', **opts):
    """display the computation to get to a valid delta for storing REV

    This command will replay the process used to find the "best" delta to store
    a revision and display information about all the steps used to get to that
    result.

    By default, the process is fed with a the full-text for the revision. This
    can be controlled with the --source flag.

    The revision use the revision number of the target storage (not changelog
    revision number).

    note: the process is initiated from a full text of the revision to store.
    """
    # one positional argument means "REV"; two mean "FILE REV"
    if arg_2 is None:
        file_ = None
        rev = arg_1
    else:
        file_ = arg_1
        rev = arg_2

    rev = int(rev)

    revlog = cmdutil.openrevlog(
        repo, b'debugdeltachain', file_, pycompat.byteskwargs(opts)
    )
    p1r, p2r = revlog.parentrevs(rev)

    # translate --source into the delta base revision fed to the search
    if source == b'full':
        base_rev = nullrev
    elif source == b'storage':
        base_rev = revlog.deltaparent(rev)
    elif source == b'p1':
        base_rev = p1r
    elif source == b'p2':
        base_rev = p2r
    elif source == b'prev':
        base_rev = rev - 1
    else:
        raise error.InputError(b"invalid --source value: %s" % source)

    revlog_debug.debug_delta_find(ui, revlog, rev, base_rev=base_rev)
1057
878
1058
879
1059 @command(
880 @command(
1060 b'debugdirstate|debugstate',
881 b'debugdirstate|debugstate',
1061 [
882 [
1062 (
883 (
1063 b'',
884 b'',
1064 b'nodates',
885 b'nodates',
1065 None,
886 None,
1066 _(b'do not display the saved mtime (DEPRECATED)'),
887 _(b'do not display the saved mtime (DEPRECATED)'),
1067 ),
888 ),
1068 (b'', b'dates', True, _(b'display the saved mtime')),
889 (b'', b'dates', True, _(b'display the saved mtime')),
1069 (b'', b'datesort', None, _(b'sort by saved mtime')),
890 (b'', b'datesort', None, _(b'sort by saved mtime')),
1070 (
891 (
1071 b'',
892 b'',
1072 b'docket',
893 b'docket',
1073 False,
894 False,
1074 _(b'display the docket (metadata file) instead'),
895 _(b'display the docket (metadata file) instead'),
1075 ),
896 ),
1076 (
897 (
1077 b'',
898 b'',
1078 b'all',
899 b'all',
1079 False,
900 False,
1080 _(b'display dirstate-v2 tree nodes that would not exist in v1'),
901 _(b'display dirstate-v2 tree nodes that would not exist in v1'),
1081 ),
902 ),
1082 ],
903 ],
1083 _(b'[OPTION]...'),
904 _(b'[OPTION]...'),
1084 )
905 )
1085 def debugstate(ui, repo, **opts):
906 def debugstate(ui, repo, **opts):
1086 """show the contents of the current dirstate"""
907 """show the contents of the current dirstate"""
1087
908
1088 if opts.get("docket"):
909 if opts.get("docket"):
1089 if not repo.dirstate._use_dirstate_v2:
910 if not repo.dirstate._use_dirstate_v2:
1090 raise error.Abort(_(b'dirstate v1 does not have a docket'))
911 raise error.Abort(_(b'dirstate v1 does not have a docket'))
1091
912
1092 docket = repo.dirstate._map.docket
913 docket = repo.dirstate._map.docket
1093 (
914 (
1094 start_offset,
915 start_offset,
1095 root_nodes,
916 root_nodes,
1096 nodes_with_entry,
917 nodes_with_entry,
1097 nodes_with_copy,
918 nodes_with_copy,
1098 unused_bytes,
919 unused_bytes,
1099 _unused,
920 _unused,
1100 ignore_pattern,
921 ignore_pattern,
1101 ) = dirstateutils.v2.TREE_METADATA.unpack(docket.tree_metadata)
922 ) = dirstateutils.v2.TREE_METADATA.unpack(docket.tree_metadata)
1102
923
1103 ui.write(_(b"size of dirstate data: %d\n") % docket.data_size)
924 ui.write(_(b"size of dirstate data: %d\n") % docket.data_size)
1104 ui.write(_(b"data file uuid: %s\n") % docket.uuid)
925 ui.write(_(b"data file uuid: %s\n") % docket.uuid)
1105 ui.write(_(b"start offset of root nodes: %d\n") % start_offset)
926 ui.write(_(b"start offset of root nodes: %d\n") % start_offset)
1106 ui.write(_(b"number of root nodes: %d\n") % root_nodes)
927 ui.write(_(b"number of root nodes: %d\n") % root_nodes)
1107 ui.write(_(b"nodes with entries: %d\n") % nodes_with_entry)
928 ui.write(_(b"nodes with entries: %d\n") % nodes_with_entry)
1108 ui.write(_(b"nodes with copies: %d\n") % nodes_with_copy)
929 ui.write(_(b"nodes with copies: %d\n") % nodes_with_copy)
1109 ui.write(_(b"number of unused bytes: %d\n") % unused_bytes)
930 ui.write(_(b"number of unused bytes: %d\n") % unused_bytes)
1110 ui.write(
931 ui.write(
1111 _(b"ignore pattern hash: %s\n") % binascii.hexlify(ignore_pattern)
932 _(b"ignore pattern hash: %s\n") % binascii.hexlify(ignore_pattern)
1112 )
933 )
1113 return
934 return
1114
935
1115 nodates = not opts['dates']
936 nodates = not opts['dates']
1116 if opts.get('nodates') is not None:
937 if opts.get('nodates') is not None:
1117 nodates = True
938 nodates = True
1118 datesort = opts.get('datesort')
939 datesort = opts.get('datesort')
1119
940
1120 if datesort:
941 if datesort:
1121
942
1122 def keyfunc(entry):
943 def keyfunc(entry):
1123 filename, _state, _mode, _size, mtime = entry
944 filename, _state, _mode, _size, mtime = entry
1124 return (mtime, filename)
945 return (mtime, filename)
1125
946
1126 else:
947 else:
1127 keyfunc = None # sort by filename
948 keyfunc = None # sort by filename
1128 entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
949 entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
1129 entries.sort(key=keyfunc)
950 entries.sort(key=keyfunc)
1130 for entry in entries:
951 for entry in entries:
1131 filename, state, mode, size, mtime = entry
952 filename, state, mode, size, mtime = entry
1132 if mtime == -1:
953 if mtime == -1:
1133 timestr = b'unset '
954 timestr = b'unset '
1134 elif nodates:
955 elif nodates:
1135 timestr = b'set '
956 timestr = b'set '
1136 else:
957 else:
1137 timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
958 timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
1138 timestr = encoding.strtolocal(timestr)
959 timestr = encoding.strtolocal(timestr)
1139 if mode & 0o20000:
960 if mode & 0o20000:
1140 mode = b'lnk'
961 mode = b'lnk'
1141 else:
962 else:
1142 mode = b'%3o' % (mode & 0o777 & ~util.umask)
963 mode = b'%3o' % (mode & 0o777 & ~util.umask)
1143 ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
964 ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
1144 for f in repo.dirstate.copies():
965 for f in repo.dirstate.copies():
1145 ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
966 ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1146
967
1147
968
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v2
    """
    if repo.dirstate._use_dirstate_v2:
        docket = repo.dirstate._map.docket
        hash_len = 20  # 160 bits for SHA-1
        hash_bytes = docket.tree_metadata[-hash_len:]
        ui.write(binascii.hexlify(hash_bytes) + b'\n')
1162
983
1163
984
1164 @command(
985 @command(
1165 b'debugdiscovery',
986 b'debugdiscovery',
1166 [
987 [
1167 (b'', b'old', None, _(b'use old-style discovery')),
988 (b'', b'old', None, _(b'use old-style discovery')),
1168 (
989 (
1169 b'',
990 b'',
1170 b'nonheads',
991 b'nonheads',
1171 None,
992 None,
1172 _(b'use old-style discovery with non-heads included'),
993 _(b'use old-style discovery with non-heads included'),
1173 ),
994 ),
1174 (b'', b'rev', [], b'restrict discovery to this set of revs'),
995 (b'', b'rev', [], b'restrict discovery to this set of revs'),
1175 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
996 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
1176 (
997 (
1177 b'',
998 b'',
1178 b'local-as-revs',
999 b'local-as-revs',
1179 b"",
1000 b"",
1180 b'treat local has having these revisions only',
1001 b'treat local has having these revisions only',
1181 ),
1002 ),
1182 (
1003 (
1183 b'',
1004 b'',
1184 b'remote-as-revs',
1005 b'remote-as-revs',
1185 b"",
1006 b"",
1186 b'use local as remote, with only these revisions',
1007 b'use local as remote, with only these revisions',
1187 ),
1008 ),
1188 ]
1009 ]
1189 + cmdutil.remoteopts
1010 + cmdutil.remoteopts
1190 + cmdutil.formatteropts,
1011 + cmdutil.formatteropts,
1191 _(b'[--rev REV] [OTHER]'),
1012 _(b'[--rev REV] [OTHER]'),
1192 )
1013 )
1193 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1014 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1194 """runs the changeset discovery protocol in isolation
1015 """runs the changeset discovery protocol in isolation
1195
1016
1196 The local peer can be "replaced" by a subset of the local repository by
1017 The local peer can be "replaced" by a subset of the local repository by
1197 using the `--local-as-revs` flag. In the same way, the usual `remote` peer
1018 using the `--local-as-revs` flag. In the same way, the usual `remote` peer
1198 can be "replaced" by a subset of the local repository using the
1019 can be "replaced" by a subset of the local repository using the
1199 `--remote-as-revs` flag. This is useful to efficiently debug pathological
1020 `--remote-as-revs` flag. This is useful to efficiently debug pathological
1200 discovery situations.
1021 discovery situations.
1201
1022
1202 The following developer oriented config are relevant for people playing with this command:
1023 The following developer oriented config are relevant for people playing with this command:
1203
1024
1204 * devel.discovery.exchange-heads=True
1025 * devel.discovery.exchange-heads=True
1205
1026
1206 If False, the discovery will not start with
1027 If False, the discovery will not start with
1207 remote head fetching and local head querying.
1028 remote head fetching and local head querying.
1208
1029
1209 * devel.discovery.grow-sample=True
1030 * devel.discovery.grow-sample=True
1210
1031
1211 If False, the sample size used in set discovery will not be increased
1032 If False, the sample size used in set discovery will not be increased
1212 through the process
1033 through the process
1213
1034
1214 * devel.discovery.grow-sample.dynamic=True
1035 * devel.discovery.grow-sample.dynamic=True
1215
1036
1216 When discovery.grow-sample.dynamic is True, the default, the sample size is
1037 When discovery.grow-sample.dynamic is True, the default, the sample size is
1217 adapted to the shape of the undecided set (it is set to the max of:
1038 adapted to the shape of the undecided set (it is set to the max of:
1218 <target-size>, len(roots(undecided)), len(heads(undecided)
1039 <target-size>, len(roots(undecided)), len(heads(undecided)
1219
1040
1220 * devel.discovery.grow-sample.rate=1.05
1041 * devel.discovery.grow-sample.rate=1.05
1221
1042
1222 the rate at which the sample grow
1043 the rate at which the sample grow
1223
1044
1224 * devel.discovery.randomize=True
1045 * devel.discovery.randomize=True
1225
1046
1226 If andom sampling during discovery are deterministic. It is meant for
1047 If andom sampling during discovery are deterministic. It is meant for
1227 integration tests.
1048 integration tests.
1228
1049
1229 * devel.discovery.sample-size=200
1050 * devel.discovery.sample-size=200
1230
1051
1231 Control the initial size of the discovery sample
1052 Control the initial size of the discovery sample
1232
1053
1233 * devel.discovery.sample-size.initial=100
1054 * devel.discovery.sample-size.initial=100
1234
1055
1235 Control the initial size of the discovery for initial change
1056 Control the initial size of the discovery for initial change
1236 """
1057 """
1237 unfi = repo.unfiltered()
1058 unfi = repo.unfiltered()
1238
1059
1239 # setup potential extra filtering
1060 # setup potential extra filtering
1240 local_revs = opts["local_as_revs"]
1061 local_revs = opts["local_as_revs"]
1241 remote_revs = opts["remote_as_revs"]
1062 remote_revs = opts["remote_as_revs"]
1242
1063
1243 # make sure tests are repeatable
1064 # make sure tests are repeatable
1244 random.seed(int(opts['seed']))
1065 random.seed(int(opts['seed']))
1245
1066
1246 if not remote_revs:
1067 if not remote_revs:
1247 path = urlutil.get_unique_pull_path_obj(
1068 path = urlutil.get_unique_pull_path_obj(
1248 b'debugdiscovery', ui, remoteurl
1069 b'debugdiscovery', ui, remoteurl
1249 )
1070 )
1250 branches = (path.branch, [])
1071 branches = (path.branch, [])
1251 remote = hg.peer(repo, pycompat.byteskwargs(opts), path)
1072 remote = hg.peer(repo, pycompat.byteskwargs(opts), path)
1252 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(path.loc))
1073 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(path.loc))
1253 else:
1074 else:
1254 branches = (None, [])
1075 branches = (None, [])
1255 remote_filtered_revs = logcmdutil.revrange(
1076 remote_filtered_revs = logcmdutil.revrange(
1256 unfi, [b"not (::(%s))" % remote_revs]
1077 unfi, [b"not (::(%s))" % remote_revs]
1257 )
1078 )
1258 remote_filtered_revs = frozenset(remote_filtered_revs)
1079 remote_filtered_revs = frozenset(remote_filtered_revs)
1259
1080
1260 def remote_func(x):
1081 def remote_func(x):
1261 return remote_filtered_revs
1082 return remote_filtered_revs
1262
1083
1263 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1084 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1264
1085
1265 remote = repo.peer()
1086 remote = repo.peer()
1266 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1087 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1267
1088
1268 if local_revs:
1089 if local_revs:
1269 local_filtered_revs = logcmdutil.revrange(
1090 local_filtered_revs = logcmdutil.revrange(
1270 unfi, [b"not (::(%s))" % local_revs]
1091 unfi, [b"not (::(%s))" % local_revs]
1271 )
1092 )
1272 local_filtered_revs = frozenset(local_filtered_revs)
1093 local_filtered_revs = frozenset(local_filtered_revs)
1273
1094
1274 def local_func(x):
1095 def local_func(x):
1275 return local_filtered_revs
1096 return local_filtered_revs
1276
1097
1277 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1098 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1278 repo = repo.filtered(b'debug-discovery-local-filter')
1099 repo = repo.filtered(b'debug-discovery-local-filter')
1279
1100
1280 data = {}
1101 data = {}
1281 if opts.get('old'):
1102 if opts.get('old'):
1282
1103
1283 def doit(pushedrevs, remoteheads, remote=remote):
1104 def doit(pushedrevs, remoteheads, remote=remote):
1284 if not hasattr(remote, 'branches'):
1105 if not hasattr(remote, 'branches'):
1285 # enable in-client legacy support
1106 # enable in-client legacy support
1286 remote = localrepo.locallegacypeer(remote.local())
1107 remote = localrepo.locallegacypeer(remote.local())
1287 if remote_revs:
1108 if remote_revs:
1288 r = remote._repo.filtered(b'debug-discovery-remote-filter')
1109 r = remote._repo.filtered(b'debug-discovery-remote-filter')
1289 remote._repo = r
1110 remote._repo = r
1290 common, _in, hds = treediscovery.findcommonincoming(
1111 common, _in, hds = treediscovery.findcommonincoming(
1291 repo, remote, force=True, audit=data
1112 repo, remote, force=True, audit=data
1292 )
1113 )
1293 common = set(common)
1114 common = set(common)
1294 if not opts.get('nonheads'):
1115 if not opts.get('nonheads'):
1295 ui.writenoi18n(
1116 ui.writenoi18n(
1296 b"unpruned common: %s\n"
1117 b"unpruned common: %s\n"
1297 % b" ".join(sorted(short(n) for n in common))
1118 % b" ".join(sorted(short(n) for n in common))
1298 )
1119 )
1299
1120
1300 clnode = repo.changelog.node
1121 clnode = repo.changelog.node
1301 common = repo.revs(b'heads(::%ln)', common)
1122 common = repo.revs(b'heads(::%ln)', common)
1302 common = {clnode(r) for r in common}
1123 common = {clnode(r) for r in common}
1303 return common, hds
1124 return common, hds
1304
1125
1305 else:
1126 else:
1306
1127
1307 def doit(pushedrevs, remoteheads, remote=remote):
1128 def doit(pushedrevs, remoteheads, remote=remote):
1308 nodes = None
1129 nodes = None
1309 if pushedrevs:
1130 if pushedrevs:
1310 revs = logcmdutil.revrange(repo, pushedrevs)
1131 revs = logcmdutil.revrange(repo, pushedrevs)
1311 nodes = [repo[r].node() for r in revs]
1132 nodes = [repo[r].node() for r in revs]
1312 common, any, hds = setdiscovery.findcommonheads(
1133 common, any, hds = setdiscovery.findcommonheads(
1313 ui,
1134 ui,
1314 repo,
1135 repo,
1315 remote,
1136 remote,
1316 ancestorsof=nodes,
1137 ancestorsof=nodes,
1317 audit=data,
1138 audit=data,
1318 abortwhenunrelated=False,
1139 abortwhenunrelated=False,
1319 )
1140 )
1320 return common, hds
1141 return common, hds
1321
1142
1322 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1143 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1323 localrevs = opts['rev']
1144 localrevs = opts['rev']
1324
1145
1325 fm = ui.formatter(b'debugdiscovery', pycompat.byteskwargs(opts))
1146 fm = ui.formatter(b'debugdiscovery', pycompat.byteskwargs(opts))
1326 if fm.strict_format:
1147 if fm.strict_format:
1327
1148
1328 @contextlib.contextmanager
1149 @contextlib.contextmanager
1329 def may_capture_output():
1150 def may_capture_output():
1330 ui.pushbuffer()
1151 ui.pushbuffer()
1331 yield
1152 yield
1332 data[b'output'] = ui.popbuffer()
1153 data[b'output'] = ui.popbuffer()
1333
1154
1334 else:
1155 else:
1335 may_capture_output = util.nullcontextmanager
1156 may_capture_output = util.nullcontextmanager
1336 with may_capture_output():
1157 with may_capture_output():
1337 with util.timedcm('debug-discovery') as t:
1158 with util.timedcm('debug-discovery') as t:
1338 common, hds = doit(localrevs, remoterevs)
1159 common, hds = doit(localrevs, remoterevs)
1339
1160
1340 # compute all statistics
1161 # compute all statistics
1341 if len(common) == 1 and repo.nullid in common:
1162 if len(common) == 1 and repo.nullid in common:
1342 common = set()
1163 common = set()
1343 heads_common = set(common)
1164 heads_common = set(common)
1344 heads_remote = set(hds)
1165 heads_remote = set(hds)
1345 heads_local = set(repo.heads())
1166 heads_local = set(repo.heads())
1346 # note: they cannot be a local or remote head that is in common and not
1167 # note: they cannot be a local or remote head that is in common and not
1347 # itself a head of common.
1168 # itself a head of common.
1348 heads_common_local = heads_common & heads_local
1169 heads_common_local = heads_common & heads_local
1349 heads_common_remote = heads_common & heads_remote
1170 heads_common_remote = heads_common & heads_remote
1350 heads_common_both = heads_common & heads_remote & heads_local
1171 heads_common_both = heads_common & heads_remote & heads_local
1351
1172
1352 all = repo.revs(b'all()')
1173 all = repo.revs(b'all()')
1353 common = repo.revs(b'::%ln', common)
1174 common = repo.revs(b'::%ln', common)
1354 roots_common = repo.revs(b'roots(::%ld)', common)
1175 roots_common = repo.revs(b'roots(::%ld)', common)
1355 missing = repo.revs(b'not ::%ld', common)
1176 missing = repo.revs(b'not ::%ld', common)
1356 heads_missing = repo.revs(b'heads(%ld)', missing)
1177 heads_missing = repo.revs(b'heads(%ld)', missing)
1357 roots_missing = repo.revs(b'roots(%ld)', missing)
1178 roots_missing = repo.revs(b'roots(%ld)', missing)
1358 assert len(common) + len(missing) == len(all)
1179 assert len(common) + len(missing) == len(all)
1359
1180
1360 initial_undecided = repo.revs(
1181 initial_undecided = repo.revs(
1361 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1182 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1362 )
1183 )
1363 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1184 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1364 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1185 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1365 common_initial_undecided = initial_undecided & common
1186 common_initial_undecided = initial_undecided & common
1366 missing_initial_undecided = initial_undecided & missing
1187 missing_initial_undecided = initial_undecided & missing
1367
1188
1368 data[b'elapsed'] = t.elapsed
1189 data[b'elapsed'] = t.elapsed
1369 data[b'nb-common-heads'] = len(heads_common)
1190 data[b'nb-common-heads'] = len(heads_common)
1370 data[b'nb-common-heads-local'] = len(heads_common_local)
1191 data[b'nb-common-heads-local'] = len(heads_common_local)
1371 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1192 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1372 data[b'nb-common-heads-both'] = len(heads_common_both)
1193 data[b'nb-common-heads-both'] = len(heads_common_both)
1373 data[b'nb-common-roots'] = len(roots_common)
1194 data[b'nb-common-roots'] = len(roots_common)
1374 data[b'nb-head-local'] = len(heads_local)
1195 data[b'nb-head-local'] = len(heads_local)
1375 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1196 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1376 data[b'nb-head-remote'] = len(heads_remote)
1197 data[b'nb-head-remote'] = len(heads_remote)
1377 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1198 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1378 heads_common_remote
1199 heads_common_remote
1379 )
1200 )
1380 data[b'nb-revs'] = len(all)
1201 data[b'nb-revs'] = len(all)
1381 data[b'nb-revs-common'] = len(common)
1202 data[b'nb-revs-common'] = len(common)
1382 data[b'nb-revs-missing'] = len(missing)
1203 data[b'nb-revs-missing'] = len(missing)
1383 data[b'nb-missing-heads'] = len(heads_missing)
1204 data[b'nb-missing-heads'] = len(heads_missing)
1384 data[b'nb-missing-roots'] = len(roots_missing)
1205 data[b'nb-missing-roots'] = len(roots_missing)
1385 data[b'nb-ini_und'] = len(initial_undecided)
1206 data[b'nb-ini_und'] = len(initial_undecided)
1386 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1207 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1387 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1208 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1388 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1209 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1389 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1210 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1390
1211
1391 fm.startitem()
1212 fm.startitem()
1392 fm.data(**pycompat.strkwargs(data))
1213 fm.data(**pycompat.strkwargs(data))
1393 # display discovery summary
1214 # display discovery summary
1394 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1215 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1395 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1216 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1396 if b'total-round-trips-heads' in data:
1217 if b'total-round-trips-heads' in data:
1397 fm.plain(
1218 fm.plain(
1398 b" round-trips-heads: %(total-round-trips-heads)9d\n" % data
1219 b" round-trips-heads: %(total-round-trips-heads)9d\n" % data
1399 )
1220 )
1400 if b'total-round-trips-branches' in data:
1221 if b'total-round-trips-branches' in data:
1401 fm.plain(
1222 fm.plain(
1402 b" round-trips-branches: %(total-round-trips-branches)9d\n"
1223 b" round-trips-branches: %(total-round-trips-branches)9d\n"
1403 % data
1224 % data
1404 )
1225 )
1405 if b'total-round-trips-between' in data:
1226 if b'total-round-trips-between' in data:
1406 fm.plain(
1227 fm.plain(
1407 b" round-trips-between: %(total-round-trips-between)9d\n" % data
1228 b" round-trips-between: %(total-round-trips-between)9d\n" % data
1408 )
1229 )
1409 fm.plain(b"queries: %(total-queries)9d\n" % data)
1230 fm.plain(b"queries: %(total-queries)9d\n" % data)
1410 if b'total-queries-branches' in data:
1231 if b'total-queries-branches' in data:
1411 fm.plain(b" queries-branches: %(total-queries-branches)9d\n" % data)
1232 fm.plain(b" queries-branches: %(total-queries-branches)9d\n" % data)
1412 if b'total-queries-between' in data:
1233 if b'total-queries-between' in data:
1413 fm.plain(b" queries-between: %(total-queries-between)9d\n" % data)
1234 fm.plain(b" queries-between: %(total-queries-between)9d\n" % data)
1414 fm.plain(b"heads summary:\n")
1235 fm.plain(b"heads summary:\n")
1415 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1236 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1416 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1237 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1417 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1238 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1418 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1239 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1419 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1240 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1420 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1241 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1421 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1242 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1422 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1243 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1423 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1244 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1424 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1245 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1425 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1246 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1426 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1247 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1427 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1248 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1428 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1249 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1429 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1250 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1430 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1251 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1431 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1252 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1432 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1253 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1433 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1254 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1434 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1255 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1435 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1256 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1436 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1257 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1437
1258
1438 if ui.verbose:
1259 if ui.verbose:
1439 fm.plain(
1260 fm.plain(
1440 b"common heads: %s\n"
1261 b"common heads: %s\n"
1441 % b" ".join(sorted(short(n) for n in heads_common))
1262 % b" ".join(sorted(short(n) for n in heads_common))
1442 )
1263 )
1443 fm.end()
1264 fm.end()
1444
1265
1445
1266
1446 _chunksize = 4 << 10
1267 _chunksize = 4 << 10
1447
1268
1448
1269
1449 @command(
1270 @command(
1450 b'debugdownload',
1271 b'debugdownload',
1451 [
1272 [
1452 (b'o', b'output', b'', _(b'path')),
1273 (b'o', b'output', b'', _(b'path')),
1453 ],
1274 ],
1454 optionalrepo=True,
1275 optionalrepo=True,
1455 )
1276 )
1456 def debugdownload(ui, repo, url, output=None, **opts):
1277 def debugdownload(ui, repo, url, output=None, **opts):
1457 """download a resource using Mercurial logic and config"""
1278 """download a resource using Mercurial logic and config"""
1458 fh = urlmod.open(ui, url, output)
1279 fh = urlmod.open(ui, url, output)
1459
1280
1460 dest = ui
1281 dest = ui
1461 if output:
1282 if output:
1462 dest = open(output, b"wb", _chunksize)
1283 dest = open(output, b"wb", _chunksize)
1463 try:
1284 try:
1464 data = fh.read(_chunksize)
1285 data = fh.read(_chunksize)
1465 while data:
1286 while data:
1466 dest.write(data)
1287 dest.write(data)
1467 data = fh.read(_chunksize)
1288 data = fh.read(_chunksize)
1468 finally:
1289 finally:
1469 if output:
1290 if output:
1470 dest.close()
1291 dest.close()
1471
1292
1472
1293
1473 @command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
1294 @command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
1474 def debugextensions(ui, repo, **opts):
1295 def debugextensions(ui, repo, **opts):
1475 '''show information about active extensions'''
1296 '''show information about active extensions'''
1476 exts = extensions.extensions(ui)
1297 exts = extensions.extensions(ui)
1477 hgver = util.version()
1298 hgver = util.version()
1478 fm = ui.formatter(b'debugextensions', pycompat.byteskwargs(opts))
1299 fm = ui.formatter(b'debugextensions', pycompat.byteskwargs(opts))
1479 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
1300 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
1480 isinternal = extensions.ismoduleinternal(extmod)
1301 isinternal = extensions.ismoduleinternal(extmod)
1481 extsource = None
1302 extsource = None
1482
1303
1483 if hasattr(extmod, '__file__'):
1304 if hasattr(extmod, '__file__'):
1484 extsource = pycompat.fsencode(extmod.__file__)
1305 extsource = pycompat.fsencode(extmod.__file__)
1485 elif getattr(sys, 'oxidized', False):
1306 elif getattr(sys, 'oxidized', False):
1486 extsource = pycompat.sysexecutable
1307 extsource = pycompat.sysexecutable
1487 if isinternal:
1308 if isinternal:
1488 exttestedwith = [] # never expose magic string to users
1309 exttestedwith = [] # never expose magic string to users
1489 else:
1310 else:
1490 exttestedwith = getattr(extmod, 'testedwith', b'').split()
1311 exttestedwith = getattr(extmod, 'testedwith', b'').split()
1491 extbuglink = getattr(extmod, 'buglink', None)
1312 extbuglink = getattr(extmod, 'buglink', None)
1492
1313
1493 fm.startitem()
1314 fm.startitem()
1494
1315
1495 if ui.quiet or ui.verbose:
1316 if ui.quiet or ui.verbose:
1496 fm.write(b'name', b'%s\n', extname)
1317 fm.write(b'name', b'%s\n', extname)
1497 else:
1318 else:
1498 fm.write(b'name', b'%s', extname)
1319 fm.write(b'name', b'%s', extname)
1499 if isinternal or hgver in exttestedwith:
1320 if isinternal or hgver in exttestedwith:
1500 fm.plain(b'\n')
1321 fm.plain(b'\n')
1501 elif not exttestedwith:
1322 elif not exttestedwith:
1502 fm.plain(_(b' (untested!)\n'))
1323 fm.plain(_(b' (untested!)\n'))
1503 else:
1324 else:
1504 lasttestedversion = exttestedwith[-1]
1325 lasttestedversion = exttestedwith[-1]
1505 fm.plain(b' (%s!)\n' % lasttestedversion)
1326 fm.plain(b' (%s!)\n' % lasttestedversion)
1506
1327
1507 fm.condwrite(
1328 fm.condwrite(
1508 ui.verbose and extsource,
1329 ui.verbose and extsource,
1509 b'source',
1330 b'source',
1510 _(b' location: %s\n'),
1331 _(b' location: %s\n'),
1511 extsource or b"",
1332 extsource or b"",
1512 )
1333 )
1513
1334
1514 if ui.verbose:
1335 if ui.verbose:
1515 fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
1336 fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
1516 fm.data(bundled=isinternal)
1337 fm.data(bundled=isinternal)
1517
1338
1518 fm.condwrite(
1339 fm.condwrite(
1519 ui.verbose and exttestedwith,
1340 ui.verbose and exttestedwith,
1520 b'testedwith',
1341 b'testedwith',
1521 _(b' tested with: %s\n'),
1342 _(b' tested with: %s\n'),
1522 fm.formatlist(exttestedwith, name=b'ver'),
1343 fm.formatlist(exttestedwith, name=b'ver'),
1523 )
1344 )
1524
1345
1525 fm.condwrite(
1346 fm.condwrite(
1526 ui.verbose and extbuglink,
1347 ui.verbose and extbuglink,
1527 b'buglink',
1348 b'buglink',
1528 _(b' bug reporting: %s\n'),
1349 _(b' bug reporting: %s\n'),
1529 extbuglink or b"",
1350 extbuglink or b"",
1530 )
1351 )
1531
1352
1532 fm.end()
1353 fm.end()
1533
1354
1534
1355
1535 @command(
1356 @command(
1536 b'debugfileset',
1357 b'debugfileset',
1537 [
1358 [
1538 (
1359 (
1539 b'r',
1360 b'r',
1540 b'rev',
1361 b'rev',
1541 b'',
1362 b'',
1542 _(b'apply the filespec on this revision'),
1363 _(b'apply the filespec on this revision'),
1543 _(b'REV'),
1364 _(b'REV'),
1544 ),
1365 ),
1545 (
1366 (
1546 b'',
1367 b'',
1547 b'all-files',
1368 b'all-files',
1548 False,
1369 False,
1549 _(b'test files from all revisions and working directory'),
1370 _(b'test files from all revisions and working directory'),
1550 ),
1371 ),
1551 (
1372 (
1552 b's',
1373 b's',
1553 b'show-matcher',
1374 b'show-matcher',
1554 None,
1375 None,
1555 _(b'print internal representation of matcher'),
1376 _(b'print internal representation of matcher'),
1556 ),
1377 ),
1557 (
1378 (
1558 b'p',
1379 b'p',
1559 b'show-stage',
1380 b'show-stage',
1560 [],
1381 [],
1561 _(b'print parsed tree at the given stage'),
1382 _(b'print parsed tree at the given stage'),
1562 _(b'NAME'),
1383 _(b'NAME'),
1563 ),
1384 ),
1564 ],
1385 ],
1565 _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
1386 _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
1566 )
1387 )
1567 def debugfileset(ui, repo, expr, **opts):
1388 def debugfileset(ui, repo, expr, **opts):
1568 '''parse and apply a fileset specification'''
1389 '''parse and apply a fileset specification'''
1569 from . import fileset
1390 from . import fileset
1570
1391
1571 fileset.symbols # force import of fileset so we have predicates to optimize
1392 fileset.symbols # force import of fileset so we have predicates to optimize
1572
1393
1573 ctx = logcmdutil.revsingle(repo, opts.get('rev'), None)
1394 ctx = logcmdutil.revsingle(repo, opts.get('rev'), None)
1574
1395
1575 stages = [
1396 stages = [
1576 (b'parsed', pycompat.identity),
1397 (b'parsed', pycompat.identity),
1577 (b'analyzed', filesetlang.analyze),
1398 (b'analyzed', filesetlang.analyze),
1578 (b'optimized', filesetlang.optimize),
1399 (b'optimized', filesetlang.optimize),
1579 ]
1400 ]
1580 stagenames = {n for n, f in stages}
1401 stagenames = {n for n, f in stages}
1581
1402
1582 showalways = set()
1403 showalways = set()
1583 if ui.verbose and not opts['show_stage']:
1404 if ui.verbose and not opts['show_stage']:
1584 # show parsed tree by --verbose (deprecated)
1405 # show parsed tree by --verbose (deprecated)
1585 showalways.add(b'parsed')
1406 showalways.add(b'parsed')
1586 if opts['show_stage'] == [b'all']:
1407 if opts['show_stage'] == [b'all']:
1587 showalways.update(stagenames)
1408 showalways.update(stagenames)
1588 else:
1409 else:
1589 for n in opts['show_stage']:
1410 for n in opts['show_stage']:
1590 if n not in stagenames:
1411 if n not in stagenames:
1591 raise error.Abort(_(b'invalid stage name: %s') % n)
1412 raise error.Abort(_(b'invalid stage name: %s') % n)
1592 showalways.update(opts['show_stage'])
1413 showalways.update(opts['show_stage'])
1593
1414
1594 tree = filesetlang.parse(expr)
1415 tree = filesetlang.parse(expr)
1595 for n, f in stages:
1416 for n, f in stages:
1596 tree = f(tree)
1417 tree = f(tree)
1597 if n in showalways:
1418 if n in showalways:
1598 if opts['show_stage'] or n != b'parsed':
1419 if opts['show_stage'] or n != b'parsed':
1599 ui.write(b"* %s:\n" % n)
1420 ui.write(b"* %s:\n" % n)
1600 ui.write(filesetlang.prettyformat(tree), b"\n")
1421 ui.write(filesetlang.prettyformat(tree), b"\n")
1601
1422
1602 files = set()
1423 files = set()
1603 if opts['all_files']:
1424 if opts['all_files']:
1604 for r in repo:
1425 for r in repo:
1605 c = repo[r]
1426 c = repo[r]
1606 files.update(c.files())
1427 files.update(c.files())
1607 files.update(c.substate)
1428 files.update(c.substate)
1608 if opts['all_files'] or ctx.rev() is None:
1429 if opts['all_files'] or ctx.rev() is None:
1609 wctx = repo[None]
1430 wctx = repo[None]
1610 files.update(
1431 files.update(
1611 repo.dirstate.walk(
1432 repo.dirstate.walk(
1612 scmutil.matchall(repo),
1433 scmutil.matchall(repo),
1613 subrepos=list(wctx.substate),
1434 subrepos=list(wctx.substate),
1614 unknown=True,
1435 unknown=True,
1615 ignored=True,
1436 ignored=True,
1616 )
1437 )
1617 )
1438 )
1618 files.update(wctx.substate)
1439 files.update(wctx.substate)
1619 else:
1440 else:
1620 files.update(ctx.files())
1441 files.update(ctx.files())
1621 files.update(ctx.substate)
1442 files.update(ctx.substate)
1622
1443
1623 m = ctx.matchfileset(repo.getcwd(), expr)
1444 m = ctx.matchfileset(repo.getcwd(), expr)
1624 if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
1445 if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
1625 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
1446 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
1626 for f in sorted(files):
1447 for f in sorted(files):
1627 if not m(f):
1448 if not m(f):
1628 continue
1449 continue
1629 ui.write(b"%s\n" % f)
1450 ui.write(b"%s\n" % f)
1630
1451
1631
1452
1632 @command(
1453 @command(
1633 b"debug-repair-issue6528",
1454 b"debug-repair-issue6528",
1634 [
1455 [
1635 (
1456 (
1636 b'',
1457 b'',
1637 b'to-report',
1458 b'to-report',
1638 b'',
1459 b'',
1639 _(b'build a report of affected revisions to this file'),
1460 _(b'build a report of affected revisions to this file'),
1640 _(b'FILE'),
1461 _(b'FILE'),
1641 ),
1462 ),
1642 (
1463 (
1643 b'',
1464 b'',
1644 b'from-report',
1465 b'from-report',
1645 b'',
1466 b'',
1646 _(b'repair revisions listed in this report file'),
1467 _(b'repair revisions listed in this report file'),
1647 _(b'FILE'),
1468 _(b'FILE'),
1648 ),
1469 ),
1649 (
1470 (
1650 b'',
1471 b'',
1651 b'paranoid',
1472 b'paranoid',
1652 False,
1473 False,
1653 _(b'check that both detection methods do the same thing'),
1474 _(b'check that both detection methods do the same thing'),
1654 ),
1475 ),
1655 ]
1476 ]
1656 + cmdutil.dryrunopts,
1477 + cmdutil.dryrunopts,
1657 )
1478 )
1658 def debug_repair_issue6528(ui, repo, **opts):
1479 def debug_repair_issue6528(ui, repo, **opts):
1659 """find affected revisions and repair them. See issue6528 for more details.
1480 """find affected revisions and repair them. See issue6528 for more details.
1660
1481
1661 The `--to-report` and `--from-report` flags allow you to cache and reuse the
1482 The `--to-report` and `--from-report` flags allow you to cache and reuse the
1662 computation of affected revisions for a given repository across clones.
1483 computation of affected revisions for a given repository across clones.
1663 The report format is line-based (with empty lines ignored):
1484 The report format is line-based (with empty lines ignored):
1664
1485
1665 ```
1486 ```
1666 <ascii-hex of the affected revision>,... <unencoded filelog index filename>
1487 <ascii-hex of the affected revision>,... <unencoded filelog index filename>
1667 ```
1488 ```
1668
1489
1669 There can be multiple broken revisions per filelog, they are separated by
1490 There can be multiple broken revisions per filelog, they are separated by
1670 a comma with no spaces. The only space is between the revision(s) and the
1491 a comma with no spaces. The only space is between the revision(s) and the
1671 filename.
1492 filename.
1672
1493
1673 Note that this does *not* mean that this repairs future affected revisions,
1494 Note that this does *not* mean that this repairs future affected revisions,
1674 that needs a separate fix at the exchange level that was introduced in
1495 that needs a separate fix at the exchange level that was introduced in
1675 Mercurial 5.9.1.
1496 Mercurial 5.9.1.
1676
1497
1677 There is a `--paranoid` flag to test that the fast implementation is correct
1498 There is a `--paranoid` flag to test that the fast implementation is correct
1678 by checking it against the slow implementation. Since this matter is quite
1499 by checking it against the slow implementation. Since this matter is quite
1679 urgent and testing every edge-case is probably quite costly, we use this
1500 urgent and testing every edge-case is probably quite costly, we use this
1680 method to test on large repositories as a fuzzing method of sorts.
1501 method to test on large repositories as a fuzzing method of sorts.
1681 """
1502 """
1682 cmdutil.check_incompatible_arguments(
1503 cmdutil.check_incompatible_arguments(
1683 opts, 'to_report', ['from_report', 'dry_run']
1504 opts, 'to_report', ['from_report', 'dry_run']
1684 )
1505 )
1685 dry_run = opts.get('dry_run')
1506 dry_run = opts.get('dry_run')
1686 to_report = opts.get('to_report')
1507 to_report = opts.get('to_report')
1687 from_report = opts.get('from_report')
1508 from_report = opts.get('from_report')
1688 paranoid = opts.get('paranoid')
1509 paranoid = opts.get('paranoid')
1689 # TODO maybe add filelog pattern and revision pattern parameters to help
1510 # TODO maybe add filelog pattern and revision pattern parameters to help
1690 # narrow down the search for users that know what they're looking for?
1511 # narrow down the search for users that know what they're looking for?
1691
1512
1692 if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
1513 if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
1693 msg = b"can only repair revlogv1 repositories, v2 is not affected"
1514 msg = b"can only repair revlogv1 repositories, v2 is not affected"
1694 raise error.Abort(_(msg))
1515 raise error.Abort(_(msg))
1695
1516
1696 rewrite.repair_issue6528(
1517 rewrite.repair_issue6528(
1697 ui,
1518 ui,
1698 repo,
1519 repo,
1699 dry_run=dry_run,
1520 dry_run=dry_run,
1700 to_report=to_report,
1521 to_report=to_report,
1701 from_report=from_report,
1522 from_report=from_report,
1702 paranoid=paranoid,
1523 paranoid=paranoid,
1703 )
1524 )
1704
1525
1705
1526
1706 @command(b'debugformat', [] + cmdutil.formatteropts)
1527 @command(b'debugformat', [] + cmdutil.formatteropts)
1707 def debugformat(ui, repo, **opts):
1528 def debugformat(ui, repo, **opts):
1708 """display format information about the current repository
1529 """display format information about the current repository
1709
1530
1710 Use --verbose to get extra information about current config value and
1531 Use --verbose to get extra information about current config value and
1711 Mercurial default."""
1532 Mercurial default."""
1712 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1533 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1713 maxvariantlength = max(len(b'format-variant'), maxvariantlength)
1534 maxvariantlength = max(len(b'format-variant'), maxvariantlength)
1714
1535
1715 def makeformatname(name):
1536 def makeformatname(name):
1716 return b'%s:' + (b' ' * (maxvariantlength - len(name)))
1537 return b'%s:' + (b' ' * (maxvariantlength - len(name)))
1717
1538
1718 fm = ui.formatter(b'debugformat', pycompat.byteskwargs(opts))
1539 fm = ui.formatter(b'debugformat', pycompat.byteskwargs(opts))
1719 if fm.isplain():
1540 if fm.isplain():
1720
1541
1721 def formatvalue(value):
1542 def formatvalue(value):
1722 if hasattr(value, 'startswith'):
1543 if hasattr(value, 'startswith'):
1723 return value
1544 return value
1724 if value:
1545 if value:
1725 return b'yes'
1546 return b'yes'
1726 else:
1547 else:
1727 return b'no'
1548 return b'no'
1728
1549
1729 else:
1550 else:
1730 formatvalue = pycompat.identity
1551 formatvalue = pycompat.identity
1731
1552
1732 fm.plain(b'format-variant')
1553 fm.plain(b'format-variant')
1733 fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
1554 fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
1734 fm.plain(b' repo')
1555 fm.plain(b' repo')
1735 if ui.verbose:
1556 if ui.verbose:
1736 fm.plain(b' config default')
1557 fm.plain(b' config default')
1737 fm.plain(b'\n')
1558 fm.plain(b'\n')
1738 for fv in upgrade.allformatvariant:
1559 for fv in upgrade.allformatvariant:
1739 fm.startitem()
1560 fm.startitem()
1740 repovalue = fv.fromrepo(repo)
1561 repovalue = fv.fromrepo(repo)
1741 configvalue = fv.fromconfig(repo)
1562 configvalue = fv.fromconfig(repo)
1742
1563
1743 if repovalue != configvalue:
1564 if repovalue != configvalue:
1744 namelabel = b'formatvariant.name.mismatchconfig'
1565 namelabel = b'formatvariant.name.mismatchconfig'
1745 repolabel = b'formatvariant.repo.mismatchconfig'
1566 repolabel = b'formatvariant.repo.mismatchconfig'
1746 elif repovalue != fv.default:
1567 elif repovalue != fv.default:
1747 namelabel = b'formatvariant.name.mismatchdefault'
1568 namelabel = b'formatvariant.name.mismatchdefault'
1748 repolabel = b'formatvariant.repo.mismatchdefault'
1569 repolabel = b'formatvariant.repo.mismatchdefault'
1749 else:
1570 else:
1750 namelabel = b'formatvariant.name.uptodate'
1571 namelabel = b'formatvariant.name.uptodate'
1751 repolabel = b'formatvariant.repo.uptodate'
1572 repolabel = b'formatvariant.repo.uptodate'
1752
1573
1753 fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
1574 fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
1754 fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
1575 fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
1755 if fv.default != configvalue:
1576 if fv.default != configvalue:
1756 configlabel = b'formatvariant.config.special'
1577 configlabel = b'formatvariant.config.special'
1757 else:
1578 else:
1758 configlabel = b'formatvariant.config.default'
1579 configlabel = b'formatvariant.config.default'
1759 fm.condwrite(
1580 fm.condwrite(
1760 ui.verbose,
1581 ui.verbose,
1761 b'config',
1582 b'config',
1762 b' %6s',
1583 b' %6s',
1763 formatvalue(configvalue),
1584 formatvalue(configvalue),
1764 label=configlabel,
1585 label=configlabel,
1765 )
1586 )
1766 fm.condwrite(
1587 fm.condwrite(
1767 ui.verbose,
1588 ui.verbose,
1768 b'default',
1589 b'default',
1769 b' %7s',
1590 b' %7s',
1770 formatvalue(fv.default),
1591 formatvalue(fv.default),
1771 label=b'formatvariant.default',
1592 label=b'formatvariant.default',
1772 )
1593 )
1773 fm.plain(b'\n')
1594 fm.plain(b'\n')
1774 fm.end()
1595 fm.end()
1775
1596
1776
1597
1777 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1598 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1778 def debugfsinfo(ui, path=b"."):
1599 def debugfsinfo(ui, path=b"."):
1779 """show information detected about current filesystem"""
1600 """show information detected about current filesystem"""
1780 ui.writenoi18n(b'path: %s\n' % path)
1601 ui.writenoi18n(b'path: %s\n' % path)
1781 ui.writenoi18n(
1602 ui.writenoi18n(
1782 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1603 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1783 )
1604 )
1784 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1605 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1785 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1606 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1786 ui.writenoi18n(
1607 ui.writenoi18n(
1787 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1608 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1788 )
1609 )
1789 ui.writenoi18n(
1610 ui.writenoi18n(
1790 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1611 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1791 )
1612 )
1792 casesensitive = b'(unknown)'
1613 casesensitive = b'(unknown)'
1793 try:
1614 try:
1794 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1615 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1795 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1616 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1796 except OSError:
1617 except OSError:
1797 pass
1618 pass
1798 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1619 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1799
1620
1800
1621
1801 @command(
1622 @command(
1802 b'debuggetbundle',
1623 b'debuggetbundle',
1803 [
1624 [
1804 (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
1625 (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
1805 (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
1626 (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
1806 (
1627 (
1807 b't',
1628 b't',
1808 b'type',
1629 b'type',
1809 b'bzip2',
1630 b'bzip2',
1810 _(b'bundle compression type to use'),
1631 _(b'bundle compression type to use'),
1811 _(b'TYPE'),
1632 _(b'TYPE'),
1812 ),
1633 ),
1813 ],
1634 ],
1814 _(b'REPO FILE [-H|-C ID]...'),
1635 _(b'REPO FILE [-H|-C ID]...'),
1815 norepo=True,
1636 norepo=True,
1816 )
1637 )
1817 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1638 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1818 """retrieves a bundle from a repo
1639 """retrieves a bundle from a repo
1819
1640
1820 Every ID must be a full-length hex node id string. Saves the bundle to the
1641 Every ID must be a full-length hex node id string. Saves the bundle to the
1821 given file.
1642 given file.
1822 """
1643 """
1823 repo = hg.peer(ui, pycompat.byteskwargs(opts), repopath)
1644 repo = hg.peer(ui, pycompat.byteskwargs(opts), repopath)
1824 if not repo.capable(b'getbundle'):
1645 if not repo.capable(b'getbundle'):
1825 raise error.Abort(b"getbundle() not supported by target repository")
1646 raise error.Abort(b"getbundle() not supported by target repository")
1826 args = {}
1647 args = {}
1827 if common:
1648 if common:
1828 args['common'] = [bin(s) for s in common]
1649 args['common'] = [bin(s) for s in common]
1829 if head:
1650 if head:
1830 args['heads'] = [bin(s) for s in head]
1651 args['heads'] = [bin(s) for s in head]
1831 # TODO: get desired bundlecaps from command line.
1652 # TODO: get desired bundlecaps from command line.
1832 args['bundlecaps'] = None
1653 args['bundlecaps'] = None
1833 bundle = repo.getbundle(b'debug', **args)
1654 bundle = repo.getbundle(b'debug', **args)
1834
1655
1835 bundletype = opts.get('type', b'bzip2').lower()
1656 bundletype = opts.get('type', b'bzip2').lower()
1836 btypes = {
1657 btypes = {
1837 b'none': b'HG10UN',
1658 b'none': b'HG10UN',
1838 b'bzip2': b'HG10BZ',
1659 b'bzip2': b'HG10BZ',
1839 b'gzip': b'HG10GZ',
1660 b'gzip': b'HG10GZ',
1840 b'bundle2': b'HG20',
1661 b'bundle2': b'HG20',
1841 }
1662 }
1842 bundletype = btypes.get(bundletype)
1663 bundletype = btypes.get(bundletype)
1843 if bundletype not in bundle2.bundletypes:
1664 if bundletype not in bundle2.bundletypes:
1844 raise error.Abort(_(b'unknown bundle type specified with --type'))
1665 raise error.Abort(_(b'unknown bundle type specified with --type'))
1845 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1666 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1846
1667
1847
1668
1848 @command(b'debugignore', [], b'[FILE]...')
1669 @command(b'debugignore', [], b'[FILE]...')
1849 def debugignore(ui, repo, *files, **opts):
1670 def debugignore(ui, repo, *files, **opts):
1850 """display the combined ignore pattern and information about ignored files
1671 """display the combined ignore pattern and information about ignored files
1851
1672
1852 With no argument display the combined ignore pattern.
1673 With no argument display the combined ignore pattern.
1853
1674
1854 Given space separated file names, shows if the given file is ignored and
1675 Given space separated file names, shows if the given file is ignored and
1855 if so, show the ignore rule (file and line number) that matched it.
1676 if so, show the ignore rule (file and line number) that matched it.
1856 """
1677 """
1857 ignore = repo.dirstate._ignore
1678 ignore = repo.dirstate._ignore
1858 if not files:
1679 if not files:
1859 # Show all the patterns
1680 # Show all the patterns
1860 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1681 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1861 else:
1682 else:
1862 m = scmutil.match(repo[None], pats=files)
1683 m = scmutil.match(repo[None], pats=files)
1863 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1684 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1864 for f in m.files():
1685 for f in m.files():
1865 nf = util.normpath(f)
1686 nf = util.normpath(f)
1866 ignored = None
1687 ignored = None
1867 ignoredata = None
1688 ignoredata = None
1868 if nf != b'.':
1689 if nf != b'.':
1869 if ignore(nf):
1690 if ignore(nf):
1870 ignored = nf
1691 ignored = nf
1871 ignoredata = repo.dirstate._ignorefileandline(nf)
1692 ignoredata = repo.dirstate._ignorefileandline(nf)
1872 else:
1693 else:
1873 for p in pathutil.finddirs(nf):
1694 for p in pathutil.finddirs(nf):
1874 if ignore(p):
1695 if ignore(p):
1875 ignored = p
1696 ignored = p
1876 ignoredata = repo.dirstate._ignorefileandline(p)
1697 ignoredata = repo.dirstate._ignorefileandline(p)
1877 break
1698 break
1878 if ignored:
1699 if ignored:
1879 if ignored == nf:
1700 if ignored == nf:
1880 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1701 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1881 else:
1702 else:
1882 ui.write(
1703 ui.write(
1883 _(
1704 _(
1884 b"%s is ignored because of "
1705 b"%s is ignored because of "
1885 b"containing directory %s\n"
1706 b"containing directory %s\n"
1886 )
1707 )
1887 % (uipathfn(f), ignored)
1708 % (uipathfn(f), ignored)
1888 )
1709 )
1889 ignorefile, lineno, line = ignoredata
1710 ignorefile, lineno, line = ignoredata
1890 ui.write(
1711 ui.write(
1891 _(b"(ignore rule in %s, line %d: '%s')\n")
1712 _(b"(ignore rule in %s, line %d: '%s')\n")
1892 % (ignorefile, lineno, line)
1713 % (ignorefile, lineno, line)
1893 )
1714 )
1894 else:
1715 else:
1895 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1716 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1896
1717
1897
1718
1898 @command(
1719 @command(
1899 b'debug-revlog-index|debugindex',
1720 b'debug-revlog-index|debugindex',
1900 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1721 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1901 _(b'-c|-m|FILE'),
1722 _(b'-c|-m|FILE'),
1902 )
1723 )
1903 def debugindex(ui, repo, file_=None, **opts):
1724 def debugindex(ui, repo, file_=None, **opts):
1904 """dump index data for a revlog"""
1725 """dump index data for a revlog"""
1905 opts = pycompat.byteskwargs(opts)
1726 opts = pycompat.byteskwargs(opts)
1906 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1727 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1907
1728
1908 fm = ui.formatter(b'debugindex', opts)
1729 fm = ui.formatter(b'debugindex', opts)
1909
1730
1910 revlog = getattr(store, '_revlog', store)
1731 revlog = getattr(store, '_revlog', store)
1911
1732
1912 return revlog_debug.debug_index(
1733 return revlog_debug.debug_index(
1913 ui,
1734 ui,
1914 repo,
1735 repo,
1915 formatter=fm,
1736 formatter=fm,
1916 revlog=revlog,
1737 revlog=revlog,
1917 full_node=ui.debugflag,
1738 full_node=ui.debugflag,
1918 )
1739 )
1919
1740
1920
1741
1921 @command(
1742 @command(
1922 b'debugindexdot',
1743 b'debugindexdot',
1923 cmdutil.debugrevlogopts,
1744 cmdutil.debugrevlogopts,
1924 _(b'-c|-m|FILE'),
1745 _(b'-c|-m|FILE'),
1925 optionalrepo=True,
1746 optionalrepo=True,
1926 )
1747 )
1927 def debugindexdot(ui, repo, file_=None, **opts):
1748 def debugindexdot(ui, repo, file_=None, **opts):
1928 """dump an index DAG as a graphviz dot file"""
1749 """dump an index DAG as a graphviz dot file"""
1929 r = cmdutil.openstorage(
1750 r = cmdutil.openstorage(
1930 repo, b'debugindexdot', file_, pycompat.byteskwargs(opts)
1751 repo, b'debugindexdot', file_, pycompat.byteskwargs(opts)
1931 )
1752 )
1932 ui.writenoi18n(b"digraph G {\n")
1753 ui.writenoi18n(b"digraph G {\n")
1933 for i in r:
1754 for i in r:
1934 node = r.node(i)
1755 node = r.node(i)
1935 pp = r.parents(node)
1756 pp = r.parents(node)
1936 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1757 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1937 if pp[1] != repo.nullid:
1758 if pp[1] != repo.nullid:
1938 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1759 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1939 ui.write(b"}\n")
1760 ui.write(b"}\n")
1940
1761
1941
1762
1942 @command(b'debugindexstats', [])
1763 @command(b'debugindexstats', [])
1943 def debugindexstats(ui, repo):
1764 def debugindexstats(ui, repo):
1944 """show stats related to the changelog index"""
1765 """show stats related to the changelog index"""
1945 repo.changelog.shortest(repo.nullid, 1)
1766 repo.changelog.shortest(repo.nullid, 1)
1946 index = repo.changelog.index
1767 index = repo.changelog.index
1947 if not hasattr(index, 'stats'):
1768 if not hasattr(index, 'stats'):
1948 raise error.Abort(_(b'debugindexstats only works with native code'))
1769 raise error.Abort(_(b'debugindexstats only works with native code'))
1949 for k, v in sorted(index.stats().items()):
1770 for k, v in sorted(index.stats().items()):
1950 ui.write(b'%s: %d\n' % (k, v))
1771 ui.write(b'%s: %d\n' % (k, v))
1951
1772
1952
1773
1953 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1774 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1954 def debuginstall(ui, **opts):
1775 def debuginstall(ui, **opts):
1955 """test Mercurial installation
1776 """test Mercurial installation
1956
1777
1957 Returns 0 on success.
1778 Returns 0 on success.
1958 """
1779 """
1959 problems = 0
1780 problems = 0
1960
1781
1961 fm = ui.formatter(b'debuginstall', pycompat.byteskwargs(opts))
1782 fm = ui.formatter(b'debuginstall', pycompat.byteskwargs(opts))
1962 fm.startitem()
1783 fm.startitem()
1963
1784
1964 # encoding might be unknown or wrong. don't translate these messages.
1785 # encoding might be unknown or wrong. don't translate these messages.
1965 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1786 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1966 err = None
1787 err = None
1967 try:
1788 try:
1968 codecs.lookup(pycompat.sysstr(encoding.encoding))
1789 codecs.lookup(pycompat.sysstr(encoding.encoding))
1969 except LookupError as inst:
1790 except LookupError as inst:
1970 err = stringutil.forcebytestr(inst)
1791 err = stringutil.forcebytestr(inst)
1971 problems += 1
1792 problems += 1
1972 fm.condwrite(
1793 fm.condwrite(
1973 err,
1794 err,
1974 b'encodingerror',
1795 b'encodingerror',
1975 b" %s\n (check that your locale is properly set)\n",
1796 b" %s\n (check that your locale is properly set)\n",
1976 err,
1797 err,
1977 )
1798 )
1978
1799
1979 # Python
1800 # Python
1980 pythonlib = None
1801 pythonlib = None
1981 if hasattr(os, '__file__'):
1802 if hasattr(os, '__file__'):
1982 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1803 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1983 elif getattr(sys, 'oxidized', False):
1804 elif getattr(sys, 'oxidized', False):
1984 pythonlib = pycompat.sysexecutable
1805 pythonlib = pycompat.sysexecutable
1985
1806
1986 fm.write(
1807 fm.write(
1987 b'pythonexe',
1808 b'pythonexe',
1988 _(b"checking Python executable (%s)\n"),
1809 _(b"checking Python executable (%s)\n"),
1989 pycompat.sysexecutable or _(b"unknown"),
1810 pycompat.sysexecutable or _(b"unknown"),
1990 )
1811 )
1991 fm.write(
1812 fm.write(
1992 b'pythonimplementation',
1813 b'pythonimplementation',
1993 _(b"checking Python implementation (%s)\n"),
1814 _(b"checking Python implementation (%s)\n"),
1994 pycompat.sysbytes(platform.python_implementation()),
1815 pycompat.sysbytes(platform.python_implementation()),
1995 )
1816 )
1996 fm.write(
1817 fm.write(
1997 b'pythonver',
1818 b'pythonver',
1998 _(b"checking Python version (%s)\n"),
1819 _(b"checking Python version (%s)\n"),
1999 (b"%d.%d.%d" % sys.version_info[:3]),
1820 (b"%d.%d.%d" % sys.version_info[:3]),
2000 )
1821 )
2001 fm.write(
1822 fm.write(
2002 b'pythonlib',
1823 b'pythonlib',
2003 _(b"checking Python lib (%s)...\n"),
1824 _(b"checking Python lib (%s)...\n"),
2004 pythonlib or _(b"unknown"),
1825 pythonlib or _(b"unknown"),
2005 )
1826 )
2006
1827
2007 try:
1828 try:
2008 from . import rustext # pytype: disable=import-error
1829 from . import rustext # pytype: disable=import-error
2009
1830
2010 rustext.__doc__ # trigger lazy import
1831 rustext.__doc__ # trigger lazy import
2011 except ImportError:
1832 except ImportError:
2012 rustext = None
1833 rustext = None
2013
1834
2014 security = set(sslutil.supportedprotocols)
1835 security = set(sslutil.supportedprotocols)
2015 if sslutil.hassni:
1836 if sslutil.hassni:
2016 security.add(b'sni')
1837 security.add(b'sni')
2017
1838
2018 fm.write(
1839 fm.write(
2019 b'pythonsecurity',
1840 b'pythonsecurity',
2020 _(b"checking Python security support (%s)\n"),
1841 _(b"checking Python security support (%s)\n"),
2021 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1842 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
2022 )
1843 )
2023
1844
2024 # These are warnings, not errors. So don't increment problem count. This
1845 # These are warnings, not errors. So don't increment problem count. This
2025 # may change in the future.
1846 # may change in the future.
2026 if b'tls1.2' not in security:
1847 if b'tls1.2' not in security:
2027 fm.plain(
1848 fm.plain(
2028 _(
1849 _(
2029 b' TLS 1.2 not supported by Python install; '
1850 b' TLS 1.2 not supported by Python install; '
2030 b'network connections lack modern security\n'
1851 b'network connections lack modern security\n'
2031 )
1852 )
2032 )
1853 )
2033 if b'sni' not in security:
1854 if b'sni' not in security:
2034 fm.plain(
1855 fm.plain(
2035 _(
1856 _(
2036 b' SNI not supported by Python install; may have '
1857 b' SNI not supported by Python install; may have '
2037 b'connectivity issues with some servers\n'
1858 b'connectivity issues with some servers\n'
2038 )
1859 )
2039 )
1860 )
2040
1861
2041 fm.plain(
1862 fm.plain(
2042 _(
1863 _(
2043 b"checking Rust extensions (%s)\n"
1864 b"checking Rust extensions (%s)\n"
2044 % (b'missing' if rustext is None else b'installed')
1865 % (b'missing' if rustext is None else b'installed')
2045 ),
1866 ),
2046 )
1867 )
2047
1868
2048 # TODO print CA cert info
1869 # TODO print CA cert info
2049
1870
2050 # hg version
1871 # hg version
2051 hgver = util.version()
1872 hgver = util.version()
2052 fm.write(
1873 fm.write(
2053 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1874 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
2054 )
1875 )
2055 fm.write(
1876 fm.write(
2056 b'hgverextra',
1877 b'hgverextra',
2057 _(b"checking Mercurial custom build (%s)\n"),
1878 _(b"checking Mercurial custom build (%s)\n"),
2058 b'+'.join(hgver.split(b'+')[1:]),
1879 b'+'.join(hgver.split(b'+')[1:]),
2059 )
1880 )
2060
1881
2061 # compiled modules
1882 # compiled modules
2062 hgmodules = None
1883 hgmodules = None
2063 if hasattr(sys.modules[__name__], '__file__'):
1884 if hasattr(sys.modules[__name__], '__file__'):
2064 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1885 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
2065 elif getattr(sys, 'oxidized', False):
1886 elif getattr(sys, 'oxidized', False):
2066 hgmodules = pycompat.sysexecutable
1887 hgmodules = pycompat.sysexecutable
2067
1888
2068 fm.write(
1889 fm.write(
2069 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1890 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
2070 )
1891 )
2071 fm.write(
1892 fm.write(
2072 b'hgmodules',
1893 b'hgmodules',
2073 _(b"checking installed modules (%s)...\n"),
1894 _(b"checking installed modules (%s)...\n"),
2074 hgmodules or _(b"unknown"),
1895 hgmodules or _(b"unknown"),
2075 )
1896 )
2076
1897
2077 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1898 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
2078 rustext = rustandc # for now, that's the only case
1899 rustext = rustandc # for now, that's the only case
2079 cext = policy.policy in (b'c', b'allow') or rustandc
1900 cext = policy.policy in (b'c', b'allow') or rustandc
2080 nopure = cext or rustext
1901 nopure = cext or rustext
2081 if nopure:
1902 if nopure:
2082 err = None
1903 err = None
2083 try:
1904 try:
2084 if cext:
1905 if cext:
2085 from .cext import ( # pytype: disable=import-error
1906 from .cext import ( # pytype: disable=import-error
2086 base85,
1907 base85,
2087 bdiff,
1908 bdiff,
2088 mpatch,
1909 mpatch,
2089 osutil,
1910 osutil,
2090 )
1911 )
2091
1912
2092 # quiet pyflakes
1913 # quiet pyflakes
2093 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1914 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
2094 if rustext:
1915 if rustext:
2095 from .rustext import ( # pytype: disable=import-error
1916 from .rustext import ( # pytype: disable=import-error
2096 ancestor,
1917 ancestor,
2097 dirstate,
1918 dirstate,
2098 )
1919 )
2099
1920
2100 dir(ancestor), dir(dirstate) # quiet pyflakes
1921 dir(ancestor), dir(dirstate) # quiet pyflakes
2101 except Exception as inst:
1922 except Exception as inst:
2102 err = stringutil.forcebytestr(inst)
1923 err = stringutil.forcebytestr(inst)
2103 problems += 1
1924 problems += 1
2104 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1925 fm.condwrite(err, b'extensionserror', b" %s\n", err)
2105
1926
2106 compengines = util.compengines._engines.values()
1927 compengines = util.compengines._engines.values()
2107 fm.write(
1928 fm.write(
2108 b'compengines',
1929 b'compengines',
2109 _(b'checking registered compression engines (%s)\n'),
1930 _(b'checking registered compression engines (%s)\n'),
2110 fm.formatlist(
1931 fm.formatlist(
2111 sorted(e.name() for e in compengines),
1932 sorted(e.name() for e in compengines),
2112 name=b'compengine',
1933 name=b'compengine',
2113 fmt=b'%s',
1934 fmt=b'%s',
2114 sep=b', ',
1935 sep=b', ',
2115 ),
1936 ),
2116 )
1937 )
2117 fm.write(
1938 fm.write(
2118 b'compenginesavail',
1939 b'compenginesavail',
2119 _(b'checking available compression engines (%s)\n'),
1940 _(b'checking available compression engines (%s)\n'),
2120 fm.formatlist(
1941 fm.formatlist(
2121 sorted(e.name() for e in compengines if e.available()),
1942 sorted(e.name() for e in compengines if e.available()),
2122 name=b'compengine',
1943 name=b'compengine',
2123 fmt=b'%s',
1944 fmt=b'%s',
2124 sep=b', ',
1945 sep=b', ',
2125 ),
1946 ),
2126 )
1947 )
2127 wirecompengines = compression.compengines.supportedwireengines(
1948 wirecompengines = compression.compengines.supportedwireengines(
2128 compression.SERVERROLE
1949 compression.SERVERROLE
2129 )
1950 )
2130 fm.write(
1951 fm.write(
2131 b'compenginesserver',
1952 b'compenginesserver',
2132 _(
1953 _(
2133 b'checking available compression engines '
1954 b'checking available compression engines '
2134 b'for wire protocol (%s)\n'
1955 b'for wire protocol (%s)\n'
2135 ),
1956 ),
2136 fm.formatlist(
1957 fm.formatlist(
2137 [e.name() for e in wirecompengines if e.wireprotosupport()],
1958 [e.name() for e in wirecompengines if e.wireprotosupport()],
2138 name=b'compengine',
1959 name=b'compengine',
2139 fmt=b'%s',
1960 fmt=b'%s',
2140 sep=b', ',
1961 sep=b', ',
2141 ),
1962 ),
2142 )
1963 )
2143 re2 = b'missing'
1964 re2 = b'missing'
2144 if util.has_re2():
1965 if util.has_re2():
2145 re2 = b'available'
1966 re2 = b'available'
2146 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1967 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
2147 fm.data(re2=bool(util._re2))
1968 fm.data(re2=bool(util._re2))
2148
1969
2149 # templates
1970 # templates
2150 p = templater.templatedir()
1971 p = templater.templatedir()
2151 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
1972 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
2152 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1973 fm.condwrite(not p, b'', _(b" no template directories found\n"))
2153 if p:
1974 if p:
2154 (m, fp) = templater.try_open_template(b"map-cmdline.default")
1975 (m, fp) = templater.try_open_template(b"map-cmdline.default")
2155 if m:
1976 if m:
2156 # template found, check if it is working
1977 # template found, check if it is working
2157 err = None
1978 err = None
2158 try:
1979 try:
2159 templater.templater.frommapfile(m)
1980 templater.templater.frommapfile(m)
2160 except Exception as inst:
1981 except Exception as inst:
2161 err = stringutil.forcebytestr(inst)
1982 err = stringutil.forcebytestr(inst)
2162 p = None
1983 p = None
2163 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1984 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
2164 else:
1985 else:
2165 p = None
1986 p = None
2166 fm.condwrite(
1987 fm.condwrite(
2167 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1988 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
2168 )
1989 )
2169 fm.condwrite(
1990 fm.condwrite(
2170 not m,
1991 not m,
2171 b'defaulttemplatenotfound',
1992 b'defaulttemplatenotfound',
2172 _(b" template '%s' not found\n"),
1993 _(b" template '%s' not found\n"),
2173 b"default",
1994 b"default",
2174 )
1995 )
2175 if not p:
1996 if not p:
2176 problems += 1
1997 problems += 1
2177 fm.condwrite(
1998 fm.condwrite(
2178 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
1999 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
2179 )
2000 )
2180
2001
2181 # editor
2002 # editor
2182 editor = ui.geteditor()
2003 editor = ui.geteditor()
2183 editor = util.expandpath(editor)
2004 editor = util.expandpath(editor)
2184 editorbin = procutil.shellsplit(editor)[0]
2005 editorbin = procutil.shellsplit(editor)[0]
2185 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
2006 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
2186 cmdpath = procutil.findexe(editorbin)
2007 cmdpath = procutil.findexe(editorbin)
2187 fm.condwrite(
2008 fm.condwrite(
2188 not cmdpath and editor == b'vi',
2009 not cmdpath and editor == b'vi',
2189 b'vinotfound',
2010 b'vinotfound',
2190 _(
2011 _(
2191 b" No commit editor set and can't find %s in PATH\n"
2012 b" No commit editor set and can't find %s in PATH\n"
2192 b" (specify a commit editor in your configuration"
2013 b" (specify a commit editor in your configuration"
2193 b" file)\n"
2014 b" file)\n"
2194 ),
2015 ),
2195 not cmdpath and editor == b'vi' and editorbin,
2016 not cmdpath and editor == b'vi' and editorbin,
2196 )
2017 )
2197 fm.condwrite(
2018 fm.condwrite(
2198 not cmdpath and editor != b'vi',
2019 not cmdpath and editor != b'vi',
2199 b'editornotfound',
2020 b'editornotfound',
2200 _(
2021 _(
2201 b" Can't find editor '%s' in PATH\n"
2022 b" Can't find editor '%s' in PATH\n"
2202 b" (specify a commit editor in your configuration"
2023 b" (specify a commit editor in your configuration"
2203 b" file)\n"
2024 b" file)\n"
2204 ),
2025 ),
2205 not cmdpath and editorbin,
2026 not cmdpath and editorbin,
2206 )
2027 )
2207 if not cmdpath and editor != b'vi':
2028 if not cmdpath and editor != b'vi':
2208 problems += 1
2029 problems += 1
2209
2030
2210 # check username
2031 # check username
2211 username = None
2032 username = None
2212 err = None
2033 err = None
2213 try:
2034 try:
2214 username = ui.username()
2035 username = ui.username()
2215 except error.Abort as e:
2036 except error.Abort as e:
2216 err = e.message
2037 err = e.message
2217 problems += 1
2038 problems += 1
2218
2039
2219 fm.condwrite(
2040 fm.condwrite(
2220 username, b'username', _(b"checking username (%s)\n"), username
2041 username, b'username', _(b"checking username (%s)\n"), username
2221 )
2042 )
2222 fm.condwrite(
2043 fm.condwrite(
2223 err,
2044 err,
2224 b'usernameerror',
2045 b'usernameerror',
2225 _(
2046 _(
2226 b"checking username...\n %s\n"
2047 b"checking username...\n %s\n"
2227 b" (specify a username in your configuration file)\n"
2048 b" (specify a username in your configuration file)\n"
2228 ),
2049 ),
2229 err,
2050 err,
2230 )
2051 )
2231
2052
2232 for name, mod in extensions.extensions():
2053 for name, mod in extensions.extensions():
2233 handler = getattr(mod, 'debuginstall', None)
2054 handler = getattr(mod, 'debuginstall', None)
2234 if handler is not None:
2055 if handler is not None:
2235 problems += handler(ui, fm)
2056 problems += handler(ui, fm)
2236
2057
2237 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
2058 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
2238 if not problems:
2059 if not problems:
2239 fm.data(problems=problems)
2060 fm.data(problems=problems)
2240 fm.condwrite(
2061 fm.condwrite(
2241 problems,
2062 problems,
2242 b'problems',
2063 b'problems',
2243 _(b"%d problems detected, please check your install!\n"),
2064 _(b"%d problems detected, please check your install!\n"),
2244 problems,
2065 problems,
2245 )
2066 )
2246 fm.end()
2067 fm.end()
2247
2068
2248 return problems
2069 return problems
2249
2070
2250
2071
2251 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
2072 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
2252 def debugknown(ui, repopath, *ids, **opts):
2073 def debugknown(ui, repopath, *ids, **opts):
2253 """test whether node ids are known to a repo
2074 """test whether node ids are known to a repo
2254
2075
2255 Every ID must be a full-length hex node id string. Returns a list of 0s
2076 Every ID must be a full-length hex node id string. Returns a list of 0s
2256 and 1s indicating unknown/known.
2077 and 1s indicating unknown/known.
2257 """
2078 """
2258 repo = hg.peer(ui, pycompat.byteskwargs(opts), repopath)
2079 repo = hg.peer(ui, pycompat.byteskwargs(opts), repopath)
2259 if not repo.capable(b'known'):
2080 if not repo.capable(b'known'):
2260 raise error.Abort(b"known() not supported by target repository")
2081 raise error.Abort(b"known() not supported by target repository")
2261 flags = repo.known([bin(s) for s in ids])
2082 flags = repo.known([bin(s) for s in ids])
2262 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
2083 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
2263
2084
2264
2085
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # Thin alias kept only so old shell-completion scripts keep working;
    # all real logic lives in debugnamecomplete.
    debugnamecomplete(ui, repo, *args)
2269
2090
2270
2091
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # --force-free-*: just delete the lock files and exit.
    if opts.get('force_free_lock'):
        repo.svfs.tryunlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.tryunlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    # --set-lock / --set-wlock: acquire, then hold until interrupted.
    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            try:
                if ui.interactive():
                    prompt = _(b"ready to release the lock (y)? $$ &Yes")
                    ui.promptchoice(prompt)
                else:
                    msg = b"%d locks held, waiting for signal\n"
                    msg %= len(locks)
                    ui.status(msg)
                    while True:  # XXX wait for a signal
                        time.sleep(0.1)
            except KeyboardInterrupt:
                msg = b"signal-received releasing locks\n"
                ui.status(msg)
            return 0
    finally:
        release(*locks)

    # Default mode: report which locks are currently held.
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # Try to take the lock non-blockingly; success means it was free
        # (this also causes stale locks to get reaped for more accurate
        # reporting).
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except FileNotFoundError:
                # Lock file vanished between the probe and the stat: free.
                pass

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2393
2214
2394
2215
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # Fetch the fulltext cache off the manifest revlog; not every
        # storage implementation has one.
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
            return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read()  # stores revision in cache too
            return

    # No action requested: dump the cache contents.
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.peek to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2468
2289
2469
2290
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    if not opts['template']:
        # NOTE(review): inner-literal spacing reconstructed to match
        # upstream convention — confirm against canonical source.
        opts['template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b'  local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b'  ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b'  other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b'  rename side: {rename_side}\n'
            b'  renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % "  extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', pycompat.byteskwargs(opts))
    fm.startitem()

    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    fm_extras = fm.nested(b'extras')
    for f, d in sorted(ms.allextras().items()):
        if f in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for k, v in d.items():
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2576
2397
2577
2398
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    names = set()
    # since we previously only listed open branches, we will handle that
    # specially (after this for loop)
    for name, ns in repo.names.items():
        if name != b'branches':
            names.update(ns.listnames(repo))
    names.update(
        tag
        for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
        if not closed
    )
    completions = set()
    if not args:
        # No prefix given: complete against everything.
        args = [b'']
    for a in args:
        completions.update(n for n in names if n.startswith(a))
    ui.write(b'\n'.join(sorted(completions)))
    ui.write(b'\n')
2600
2421
2601
2422
@command(
    b'debugnodemap',
    (
        cmdutil.debugrevlogopts
        + [
            (
                b'',
                b'dump-new',
                False,
                _(b'write a (new) persistent binary nodemap on stdout'),
            ),
            (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
            (
                b'',
                b'check',
                False,
                _(b'check that the data on disk data are correct.'),
            ),
            (
                b'',
                b'metadata',
                False,
                _(b'display the on disk meta data for the nodemap'),
            ),
        ]
    ),
    _(b'-c|-m|FILE'),
)
def debugnodemap(ui, repo, file_=None, **opts):
    """write and inspect on disk nodemap"""
    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
        if file_ is not None:
            raise error.InputError(
                _(b'cannot specify a file with other arguments')
            )
    elif file_ is None:
        # Default to the changelog when nothing else is selected.
        opts['changelog'] = True
    r = cmdutil.openstorage(
        repo.unfiltered(), b'debugnodemap', file_, pycompat.byteskwargs(opts)
    )
    if isinstance(r, (manifest.manifestrevlog, filelog.filelog)):
        r = r._revlog
    if opts['dump_new']:
        # Prefer the index's native serializer when available (rust/C).
        if hasattr(r.index, "nodemap_data_all"):
            data = r.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(r.index)
        ui.write(data)
    elif opts['dump_disk']:
        nm_data = nodemap.persisted_data(r)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        nm_data = nodemap.persisted_data(r)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, r.index, data)
    elif opts['metadata']:
        nm_data = nodemap.persisted_data(r)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2671
2492
2672
2493
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    def parsenodeid(s):
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != repo.nodeconstants.nodelen:
                raise ValueError
            return n
        except ValueError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    if opts.get('delete'):
        indices = []
        for v in opts.get('delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # Creation mode: record a marker obsoleting `precursor` with the
        # given successors.
        if opts['rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts['user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get('date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts['record_parents']:
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot used --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts['flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # Display mode.
        if opts['rev']:
            revs = logcmdutil.revrange(repo, opts['rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts['exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get('rev') and opts.get('index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', pycompat.byteskwargs(opts))
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get('index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2820
2641
2821
2642
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    # Default to the working directory context when no --rev is given.
    ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
    for dst, src in ctx.p1copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2833
2654
2834
2655
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""

    # Default to the working directory context when no --rev is given.
    ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2846
2667
2847
2668
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # Return (files, dirs) completions for `path`, restricted to
        # dirstate entries whose state letter is in `acceptable`.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            # Outside the repository: nothing to complete.
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        spec = spec[len(rootdir) :]
        # The dirstate stores paths with '/' separators; translate when
        # the OS separator differs (e.g. Windows).
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.items():
            if f.startswith(spec) and st.state in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # Without --full, stop at the next path segment.
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # Build the set of acceptable dirstate states from the flags;
    # an empty selection means "all of n/m/a/r" below.
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2916
2737
2917
2738
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    ctx1 = scmutil.revsingle(repo, rev1)
    ctx2 = scmutil.revsingle(repo, rev2)
    # The match is built against the first context; sort for stable output.
    m = scmutil.match(ctx1, pats, opts)
    for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
        ui.write(b'%s -> %s\n' % (src, dst))
2931
2752
2932
2753
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    overrides = {
        (b'devel', b'debug.peer-request'): True,
    }

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

    try:
        local = peer.local() is not None
        canpush = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
        ui.write(
            _(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no'))
        )
    finally:
        # Close the peer even if one of the probe calls raised.
        peer.close()
2956
2777
2957
2778
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    overrides = {}
    if opts['tool']:
        overrides[(b'ui', b'forcemerge')] = opts['tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts['tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get('rev'))
        m = scmutil.match(ctx, pats, pycompat.byteskwargs(opts))
        changedelete = opts['changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            # Silence _picktool's chatter unless --debug was requested.
            with ui.silent(
                error=True
            ) if not ui.debugflag else util.nullcontextmanager():
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (path, tool))
3041
2862
3042
2863
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    target = hg.peer(ui, {}, repopath)
    try:
        if keyinfo:
            key, old, new = keyinfo
            with target.commandexecutor() as e:
                r = e.callcommand(
                    b'pushkey',
                    {
                        b'namespace': namespace,
                        b'key': key,
                        b'old': old,
                        b'new': new,
                    },
                ).result()

            ui.status(pycompat.bytestr(r) + b'\n')
            # Shell convention: 0 on success, so invert the boolean result.
            return not r
        else:
            for k, v in sorted(target.listkeys(namespace).items()):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(k), stringutil.escapestr(v))
                )
    finally:
        target.close()
3078
2899
3079
2900
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """compare the pvec (parent vector) encoding of two revisions"""
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    # Classify the relation between the two vectors.
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    # NOTE(review): if none of the comparisons above holds, `rel` is
    # unbound and the write below raises — presumably the pvec
    # comparison operators are exhaustive; TODO confirm in pvec.py.
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
3106
2927
3107
2928
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        if repo.currenttransaction() is not None:
            msg = b'rebuild the dirstate outside of a transaction'
            raise error.ProgrammingError(msg)
        dirstate = repo.dirstate
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            manifestonly = manifestfiles - dirstatefiles
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = {f for f in dsonly if not dirstate.get_entry(f).added}
            changedfiles = manifestonly | dsnotadded

        with dirstate.changing_parents(repo):
            dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3159
2980
3160
2981
@command(
    b'debugrebuildfncache',
    [
        (
            b'',
            b'only-data',
            False,
            _(b'only look for wrong .d files (much faster)'),
        )
    ],
    b'',
)
def debugrebuildfncache(ui, repo, **opts):
    """rebuild the fncache file"""
    repair.rebuildfncache(ui, repo, opts.get("only_data"))
3176
2997
3177
2998
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    ctx = scmutil.revsingle(repo, opts.get('rev'))
    m = scmutil.match(ctx, pats, pycompat.byteskwargs(opts))
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        # renamed() returns (source path, source filenode) or falsy.
        o = fctx.filelog().renamed(fctx.filenode())
        rel = repo.pathto(abs)
        if o:
            ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
        else:
            ui.write(_(b"%s not renamed\n") % rel)
3196
3017
3197
3018
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    for r in sorted(repo.requirements):
        ui.write(b"%s\n" % r)
3203
3024
3204
3025
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    r = cmdutil.openrevlog(
        repo, b'debugrevlog', file_, pycompat.byteskwargs(opts)
    )

    # --dump emits raw index data; otherwise print the statistics report.
    if opts.get("dump"):
        revlog_debug.dump(ui, r)
    else:
        revlog_debug.debug_revlog(ui, r)
    return 0
3222
3043
3223
3044
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    r = cmdutil.openrevlog(
        repo, b'debugrevlogindex', file_, pycompat.byteskwargs(opts)
    )
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # --debug prints full hashes; otherwise the short form.
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    # Print the column header matching the chosen format/verbosity.
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b"   rev    offset  length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b"   rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b"   rev flag   offset   length     size   link     p1"
                    b"     p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b"   rev flag     size   link     p1     p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # Fall back to null parents when lookup fails.
                pp = [repo.nullid, repo.nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3338
3159
3339
3160
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    aliases = ui.configitems(b'revsetalias')
    # The parsing pipeline: each stage takes the tree produced by the
    # previous stage and returns a transformed tree.  The stage names are
    # the values accepted by -p/--show-stage.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts['no_optimized']:
        # drop the final 'optimized' stage entirely
        stages = stages[:-1]
    if opts['verify_optimized'] and opts['no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # showalways: stages always printed; showchanged: stages printed only
    # when their output differs from the previously printed tree.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts['optimize']:
        showalways.add(b'optimized')
    if opts['show_stage'] and opts['optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts['show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    # Run every stage, remembering each intermediate tree so that
    # --verify-optimized can compare 'analyzed' against 'optimized' below.
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts['show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts['verify_optimized']:
        # Evaluate both the analyzed and optimized trees and diff the
        # resulting revision lists; any difference is an optimizer bug.
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # print a unified-diff-style listing of the differing revisions
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    # normal path: evaluate the final tree and print the resulting revisions
    func = revset.makematcher(tree)
    revs = func(repo)
    if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts['show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3470
3291
3471
3292
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    if not opts['sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    # optional file handle that the server will mirror its I/O to
    logfh = None

    if opts['logiofd'] and opts['logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    if opts['logiofd']:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        try:
            logfh = os.fdopen(int(opts['logiofd']), 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts['logiofd']), 'wb', 0)
    elif opts['logiofile']:
        logfh = open(opts['logiofile'], b'ab', 0)

    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()
3518
3339
3519
3340
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, without touching
    anything else. This is useful for writing repository conversion tools, but
    should be used with extreme care. For example, neither the working
    directory nor the dirstate is updated, so file status may be incorrect
    after running this command. Only use it if you are one of the few people
    that deeply understand both conversion tools and file level histories. If
    you are reading this help, you are not one of those people (most of them
    sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    # resolve both revisions to nodes; a missing REV2 means the null revision
    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    with repo.wlock():
        repo.setparents(node1, node2)
3547
3368
3548
3369
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
        # with -c/-m/--dir the positional FILE argument is actually the
        # revision, so shuffle the arguments accordingly
        if rev is not None:
            raise error.InputError(
                _(b'cannot specify a revision with other arguments')
            )
        file_, rev = None, file_
    elif rev is None:
        raise error.InputError(_(b'please specify a revision'))
    r = cmdutil.openstorage(
        repo, b'debugdata', file_, pycompat.byteskwargs(opts)
    )
    # unwrap to the underlying revlog when the storage object has one
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        # print entries sorted by key; values only shown with --verbose
        sidedata = list(sidedata.items())
        sidedata.sort()
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3578
3399
3579
3400
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    path = urlutil.get_unique_pull_path_obj(b'debugssl', ui, source)
    url = path.url

    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # ssl.wrap_socket() was deprecated in Python 3.7 and removed in Python
    # 3.12, so build an explicit SSLContext instead. Certificate
    # verification is deliberately disabled: the point of this command is
    # to fetch the peer certificate even when the local chain is broken,
    # so that win32.checkcertificatechain() can repair it.
    sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    sslcontext.check_hostname = False
    sslcontext.verify_mode = ssl.CERT_NONE
    s = sslcontext.wrap_socket(socket.socket())

    try:
        s.connect(addr)
        # DER-encoded peer certificate (binary form requested via True)
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            # second call with build=True asks Windows to fetch the
            # missing intermediates/root via Windows Update
            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3649
3470
3650
3471
@command(
    b'debug::stable-tail-sort',
    [
        (
            b'T',
            b'template',
            b'{rev}\n',
            _(b'display with template'),
            _(b'TEMPLATE'),
        ),
    ],
    b'REV',
)
def debug_stable_tail_sort(ui, repo, revspec, template, **opts):
    """display the stable-tail sort of the ancestors of a given node"""
    # resolve the user-supplied revision to a numeric revision
    head_rev = logcmdutil.revsingle(repo, revspec).rev()

    # render each ancestor, in stable-tail order, through the template
    displayer = logcmdutil.maketemplater(ui, repo, template)
    ordering = stabletailsort._stable_tail_sort_naive(repo.changelog, head_rev)
    for anc_rev in ordering:
        displayer.show(repo[anc_rev])
3673
3494
3674
3495
@command(
    b'debug::stable-tail-sort-leaps',
    [
        (
            b'T',
            b'template',
            b'{rev}',
            _(b'display with template'),
            _(b'TEMPLATE'),
        ),
        (b's', b'specific', False, _(b'restrict to specific leaps')),
    ],
    b'REV',
)
def debug_stable_tail_sort_leaps(ui, repo, rspec, template, specific, **opts):
    """display the leaps in the stable-tail sort of a node, one per line"""
    head_rev = logcmdutil.revsingle(repo, rspec).rev()

    # pick the leap enumerator: all leaps, or only the "specific" ones
    if specific:
        leap_finder = stabletailsort._find_specific_leaps_naive
    else:
        leap_finder = stabletailsort._find_all_leaps_naive

    # each leap is printed as its source and target, then a blank line
    displayer = logcmdutil.maketemplater(ui, repo, template)
    for leap_source, leap_target in leap_finder(repo.changelog, head_rev):
        displayer.show(repo[leap_source])
        displayer.show(repo[leap_target])
        ui.write(b'\n')
3703
3524
3704
3525
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # collect every *.hg bundle in .hg/strip-backup, newest first
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    # fixed values expected by bundlerepo.getremotechanges() below
    opts["bundle"] = b""
    opts["force"] = None
    limit = logcmdutil.getlimit(pycompat.byteskwargs(opts))

    def display(other, chlist, displayer):
        # show up to `limit` changesets from one bundle, honouring the
        # --newest-first and --no-merges log options
        if opts.get("newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            if opts.get("no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get("recover")
    if recovernode:
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        path = urlutil.get_unique_pull_path_obj(
            b'debugbackupbundle',
            ui,
            source,
        )
        try:
            # open the bundle file as a peer repository
            other = hg.peer(repo, pycompat.byteskwargs(opts), path)
        except error.LookupError as ex:
            msg = _(b"\nwarning: unable to open bundle %s") % path.loc
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        branches = (path.branch, opts.get('branch', []))
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get("rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # suppress output while computing the incoming changesets
        with ui.silent():
            try:
                other, chlist, cleanupfn = bundlerepo.getremotechanges(
                    ui, repo, other, revs, opts["bundle"], opts["force"]
                )
            except error.LookupError:
                continue

        try:
            if not chlist:
                continue
            if recovernode:
                # --recover: apply the first bundle containing the node,
                # then stop scanning
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, path.loc)
                        gen = exchange.readbundle(ui, f, path.loc)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + path.loc,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + path.loc)
                        break
            else:
                # listing mode: header line with the bundle's mtime, then
                # either the bundle path (--verbose) or its changesets
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(path.loc)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), path.loc))
                else:
                    opts[
                        "template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                    displayer = logcmdutil.changesetdisplayer(
                        ui, other, pycompat.byteskwargs(opts), False
                    )
                    display(other, chlist, displayer)
                    displayer.close()
        finally:
            cleanupfn()
3843
3664
3844
3665
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    """show the subrepository state recorded in the given revision"""
    ctx = scmutil.revsingle(repo, rev, None)
    # one entry per subrepo path, in sorted order
    for sub_path, sub_state in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % sub_path)
        ui.writenoi18n(b' source %s\n' % sub_state[0])
        ui.writenoi18n(b' revision %s\n' % sub_state[1])
3856
3677
3857
3678
@command(
    b'debugshell',
    [
        (
            b'c',
            b'command',
            b'',
            _(b'program passed in as a string'),
            _(b'COMMAND'),
        )
    ],
    _(b'[-c COMMAND]'),
    optionalrepo=True,
)
def debugshell(ui, repo, **opts):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    import code

    # names exposed in the interpreter's local namespace
    imported_objects = {
        'ui': ui,
        'repo': repo,
    }

    # py2exe disables initialization of the site module, which is responsible
    # for arranging for ``quit()`` to exit the interpreter. Manually initialize
    # the stuff that site normally does here, so that the interpreter can be
    # quit in a consistent manner, whether run with pyoxidizer, exewrapper.c,
    # py.exe, or py2exe.
    if getattr(sys, "frozen", None) == 'console_exe':
        try:
            import site

            site.setcopyright()
            site.sethelper()
            site.setquit()
        except ImportError:
            site = None  # Keep PyCharm happy

    command = opts.get('command')
    if command:
        # -c: compile and run the given program instead of going interactive
        compiled = code.compile_command(encoding.strfromlocal(command))
        code.InteractiveInterpreter(locals=imported_objects).runcode(compiled)
        return

    code.interact(local=imported_objects)
3907
3728
3908
3729
@command(
    b'debug-revlog-stats',
    [
        (b'c', b'changelog', None, _(b'Display changelog statistics')),
        (b'm', b'manifest', None, _(b'Display manifest statistics')),
        (b'f', b'filelogs', None, _(b'Display filelogs statistics')),
    ]
    + cmdutil.formatteropts,
)
def debug_revlog_stats(ui, repo, **opts):
    """display statistics about revlogs in the store"""
    selection = [opts["changelog"], opts["manifest"], opts["filelogs"]]

    # When the user selected nothing explicitly, report on everything.
    if all(choice is None for choice in selection):
        selection = [True, True, True]
    changelog, manifest, filelogs = selection

    # Statistics are gathered on the unfiltered repository.
    repo = repo.unfiltered()
    fm = ui.formatter(b'debug-revlog-stats', pycompat.byteskwargs(opts))
    revlog_debug.debug_revlog_stats(repo, fm, changelog, manifest, filelogs)
    fm.end()
3933
3754
3934
3755
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless the closest
    successors sets option is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # Shared across iterations so successorssets() can reuse prior work.
    cache = {}
    ctx2str = bytes
    node2str = short
    closest = opts['closest']
    for rev in logcmdutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % ctx2str(ctx))
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=closest, cache=cache
        ):
            if succsset:
                # Indent the set under its revision; nodes are space-separated.
                ui.write(b'    ')
                ui.write(b' '.join(node2str(n) for n in succsset))
            ui.write(b'\n')
3989
3810
3990
3811
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    flog = repo.file(b'.hgtags')
    for r in repo:
        node = repo[r].node()
        tagsnode = cache.getfnode(node, computemissing=False)
        if tagsnode is None:
            # No cached entry for this revision.
            display = b'missing'
        elif not tagsnode:
            # Present but falsy: a corrupt / unusable cache entry.
            display = b'invalid'
        else:
            display = hex(tagsnode)
            # Flag entries pointing at nodes the .hgtags filelog lacks.
            if not flog.hasnode(tagsnode):
                display += b' (unknown node)'

        ui.write(b'%d %s %s\n' % (r, hex(node), display))
4009
3830
4010
3831
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = logcmdutil.revrange(repo, opts['rev'])

    # Turn the -D KEY=VALUE definitions into template properties.
    props = {}
    for definition in opts['define']:
        try:
            k, v = (e.strip() for e in definition.split(b'=', 1))
            # An empty key and the reserved name 'ui' are both rejected.
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(
                _(b'malformed keyword definition: %s') % definition
            )

    if ui.verbose:
        # Show the parse tree, and the alias-expanded tree when it differs.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    def showsymbols(t):
        # List the keywords and functions referenced by the template.
        kwds, funcs = t.symbolsuseddefault()
        ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
        ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))

    if revs is None:
        # Generic template: render once, without changeset context.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            showsymbols(t)
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested changeset.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            showsymbols(displayer.t)
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
4074
3895
4075
3896
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    response = ui.getpass(prompt)
    # getpass() can return None (e.g. non-interactive ui); make that visible.
    if response is None:
        response = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % response)
4090
3911
4091
3912
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo whatever the ui's prompt machinery produced.
    response = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % response)
4104
3925
4105
3926
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Hold both locks so every cache type may be rebuilt safely.
    with repo.wlock(), repo.lock():
        repo.updatecaches(caches=repository.CACHES_ALL)
4111
3932
4112
3933
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # Deduplicate requested optimizations before handing off to the
    # upgrade machinery, which does the real work.
    optimizations = set(optimize)
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=optimizations, backup=backup, **opts
    )
4162
3983
4163
3984
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    m = scmutil.match(repo[None], pats, pycompat.byteskwargs(opts))
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    items = list(repo[None].walk(m))
    if not items:
        return

    # Optionally normalize path separators for display.
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        displaypath = util.normpath
    else:
        displaypath = lambda fn: fn

    # Column widths are sized to the longest entries so output lines up.
    fmt = b'f  %%-%ds  %%-%ds  %%s' % (
        max(len(p) for p in items),
        max(len(repo.pathto(p)) for p in items),
    )
    for p in items:
        line = fmt % (
            p,
            displaypath(repo.pathto(p)),
            m.exact(p) and b'exact' or b'',
        )
        ui.write(b"%s\n" % line.rstrip())
4189
4010
4190
4011
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
        divergent = entry.get(b'divergentnodes')
        if divergent:
            # Render each divergent changeset as "<hex> (<phase>)", with a
            # trailing space to separate the list from the reason text.
            dnodes = (
                b' '.join(
                    b'%s (%s)' % (ctx.hex(), ctx.phasestr())
                    for ctx in divergent
                )
                + b' '
            )
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4208
4029
4209
4030
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    # Open a peer for the given repository path.
    repo = hg.peer(ui, pycompat.byteskwargs(opts), repopath)
    try:
        # Drop the generic remote options; only command arguments remain.
        for opt in cmdutil.remoteopts:
            del opts[pycompat.sysstr(opt[1])]
        # Forward only the arguments that were actually provided.
        args = {k: v for k, v in opts.items() if v}

        # run twice to check that we don't mess up the stream for the next command
        res1 = repo.debugwireargs(*vals, **args)
        res2 = repo.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % res1)
        if res1 != res2:
            ui.warn(b"%s\n" % res2)
    finally:
        repo.close()
4239
4060
4240
4061
4241 def _parsewirelangblocks(fh):
4062 def _parsewirelangblocks(fh):
4242 activeaction = None
4063 activeaction = None
4243 blocklines = []
4064 blocklines = []
4244 lastindent = 0
4065 lastindent = 0
4245
4066
4246 for line in fh:
4067 for line in fh:
4247 line = line.rstrip()
4068 line = line.rstrip()
4248 if not line:
4069 if not line:
4249 continue
4070 continue
4250
4071
4251 if line.startswith(b'#'):
4072 if line.startswith(b'#'):
4252 continue
4073 continue
4253
4074
4254 if not line.startswith(b' '):
4075 if not line.startswith(b' '):
4255 # New block. Flush previous one.
4076 # New block. Flush previous one.
4256 if activeaction:
4077 if activeaction:
4257 yield activeaction, blocklines
4078 yield activeaction, blocklines
4258
4079
4259 activeaction = line
4080 activeaction = line
4260 blocklines = []
4081 blocklines = []
4261 lastindent = 0
4082 lastindent = 0
4262 continue
4083 continue
4263
4084
4264 # Else we start with an indent.
4085 # Else we start with an indent.
4265
4086
4266 if not activeaction:
4087 if not activeaction:
4267 raise error.Abort(_(b'indented line outside of block'))
4088 raise error.Abort(_(b'indented line outside of block'))
4268
4089
4269 indent = len(line) - len(line.lstrip())
4090 indent = len(line) - len(line.lstrip())
4270
4091
4271 # If this line is indented more than the last line, concatenate it.
4092 # If this line is indented more than the last line, concatenate it.
4272 if indent > lastindent and blocklines:
4093 if indent > lastindent and blocklines:
4273 blocklines[-1] += line.lstrip()
4094 blocklines[-1] += line.lstrip()
4274 else:
4095 else:
4275 blocklines.append(line)
4096 blocklines.append(line)
4276 lastindent = indent
4097 lastindent = indent
4277
4098
4278 # Flush last block.
4099 # Flush last block.
4279 if activeaction:
4100 if activeaction:
4280 yield activeaction, blocklines
4101 yield activeaction, blocklines
4281
4102
4282
4103
4283 @command(
4104 @command(
4284 b'debugwireproto',
4105 b'debugwireproto',
4285 [
4106 [
4286 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4107 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4287 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4108 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4288 (
4109 (
4289 b'',
4110 b'',
4290 b'noreadstderr',
4111 b'noreadstderr',
4291 False,
4112 False,
4292 _(b'do not read from stderr of the remote'),
4113 _(b'do not read from stderr of the remote'),
4293 ),
4114 ),
4294 (
4115 (
4295 b'',
4116 b'',
4296 b'nologhandshake',
4117 b'nologhandshake',
4297 False,
4118 False,
4298 _(b'do not log I/O related to the peer handshake'),
4119 _(b'do not log I/O related to the peer handshake'),
4299 ),
4120 ),
4300 ]
4121 ]
4301 + cmdutil.remoteopts,
4122 + cmdutil.remoteopts,
4302 _(b'[PATH]'),
4123 _(b'[PATH]'),
4303 optionalrepo=True,
4124 optionalrepo=True,
4304 )
4125 )
4305 def debugwireproto(ui, repo, path=None, **opts):
4126 def debugwireproto(ui, repo, path=None, **opts):
4306 """send wire protocol commands to a server
4127 """send wire protocol commands to a server
4307
4128
4308 This command can be used to issue wire protocol commands to remote
4129 This command can be used to issue wire protocol commands to remote
4309 peers and to debug the raw data being exchanged.
4130 peers and to debug the raw data being exchanged.
4310
4131
4311 ``--localssh`` will start an SSH server against the current repository
4132 ``--localssh`` will start an SSH server against the current repository
4312 and connect to that. By default, the connection will perform a handshake
4133 and connect to that. By default, the connection will perform a handshake
4313 and establish an appropriate peer instance.
4134 and establish an appropriate peer instance.
4314
4135
4315 ``--peer`` can be used to bypass the handshake protocol and construct a
4136 ``--peer`` can be used to bypass the handshake protocol and construct a
4316 peer instance using the specified class type. Valid values are ``raw``,
4137 peer instance using the specified class type. Valid values are ``raw``,
4317 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4138 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4318 don't support higher-level command actions.
4139 don't support higher-level command actions.
4319
4140
4320 ``--noreadstderr`` can be used to disable automatic reading from stderr
4141 ``--noreadstderr`` can be used to disable automatic reading from stderr
4321 of the peer (for SSH connections only). Disabling automatic reading of
4142 of the peer (for SSH connections only). Disabling automatic reading of
4322 stderr is useful for making output more deterministic.
4143 stderr is useful for making output more deterministic.
4323
4144
4324 Commands are issued via a mini language which is specified via stdin.
4145 Commands are issued via a mini language which is specified via stdin.
4325 The language consists of individual actions to perform. An action is
4146 The language consists of individual actions to perform. An action is
4326 defined by a block. A block is defined as a line with no leading
4147 defined by a block. A block is defined as a line with no leading
4327 space followed by 0 or more lines with leading space. Blocks are
4148 space followed by 0 or more lines with leading space. Blocks are
4328 effectively a high-level command with additional metadata.
4149 effectively a high-level command with additional metadata.
4329
4150
4330 Lines beginning with ``#`` are ignored.
4151 Lines beginning with ``#`` are ignored.
4331
4152
4332 The following sections denote available actions.
4153 The following sections denote available actions.
4333
4154
4334 raw
4155 raw
4335 ---
4156 ---
4336
4157
4337 Send raw data to the server.
4158 Send raw data to the server.
4338
4159
4339 The block payload contains the raw data to send as one atomic send
4160 The block payload contains the raw data to send as one atomic send
4340 operation. The data may not actually be delivered in a single system
4161 operation. The data may not actually be delivered in a single system
4341 call: it depends on the abilities of the transport being used.
4162 call: it depends on the abilities of the transport being used.
4342
4163
4343 Each line in the block is de-indented and concatenated. Then, that
4164 Each line in the block is de-indented and concatenated. Then, that
4344 value is evaluated as a Python b'' literal. This allows the use of
4165 value is evaluated as a Python b'' literal. This allows the use of
4345 backslash escaping, etc.
4166 backslash escaping, etc.
4346
4167
4347 raw+
4168 raw+
4348 ----
4169 ----
4349
4170
4350 Behaves like ``raw`` except flushes output afterwards.
4171 Behaves like ``raw`` except flushes output afterwards.
4351
4172
4352 command <X>
4173 command <X>
4353 -----------
4174 -----------
4354
4175
4355 Send a request to run a named command, whose name follows the ``command``
4176 Send a request to run a named command, whose name follows the ``command``
4356 string.
4177 string.
4357
4178
4358 Arguments to the command are defined as lines in this block. The format of
4179 Arguments to the command are defined as lines in this block. The format of
4359 each line is ``<key> <value>``. e.g.::
4180 each line is ``<key> <value>``. e.g.::
4360
4181
4361 command listkeys
4182 command listkeys
4362 namespace bookmarks
4183 namespace bookmarks
4363
4184
4364 If the value begins with ``eval:``, it will be interpreted as a Python
4185 If the value begins with ``eval:``, it will be interpreted as a Python
4365 literal expression. Otherwise values are interpreted as Python b'' literals.
4186 literal expression. Otherwise values are interpreted as Python b'' literals.
4366 This allows sending complex types and encoding special byte sequences via
4187 This allows sending complex types and encoding special byte sequences via
4367 backslash escaping.
4188 backslash escaping.
4368
4189
4369 The following arguments have special meaning:
4190 The following arguments have special meaning:
4370
4191
4371 ``PUSHFILE``
4192 ``PUSHFILE``
4372 When defined, the *push* mechanism of the peer will be used instead
4193 When defined, the *push* mechanism of the peer will be used instead
4373 of the static request-response mechanism and the content of the
4194 of the static request-response mechanism and the content of the
4374 file specified in the value of this argument will be sent as the
4195 file specified in the value of this argument will be sent as the
4375 command payload.
4196 command payload.
4376
4197
4377 This can be used to submit a local bundle file to the remote.
4198 This can be used to submit a local bundle file to the remote.
4378
4199
4379 batchbegin
4200 batchbegin
4380 ----------
4201 ----------
4381
4202
4382 Instruct the peer to begin a batched send.
4203 Instruct the peer to begin a batched send.
4383
4204
4384 All ``command`` blocks are queued for execution until the next
4205 All ``command`` blocks are queued for execution until the next
4385 ``batchsubmit`` block.
4206 ``batchsubmit`` block.
4386
4207
4387 batchsubmit
4208 batchsubmit
4388 -----------
4209 -----------
4389
4210
4390 Submit previously queued ``command`` blocks as a batch request.
4211 Submit previously queued ``command`` blocks as a batch request.
4391
4212
4392 This action MUST be paired with a ``batchbegin`` action.
4213 This action MUST be paired with a ``batchbegin`` action.
4393
4214
4394 httprequest <method> <path>
4215 httprequest <method> <path>
4395 ---------------------------
4216 ---------------------------
4396
4217
4397 (HTTP peer only)
4218 (HTTP peer only)
4398
4219
4399 Send an HTTP request to the peer.
4220 Send an HTTP request to the peer.
4400
4221
4401 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4222 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4402
4223
4403 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4224 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4404 headers to add to the request. e.g. ``Accept: foo``.
4225 headers to add to the request. e.g. ``Accept: foo``.
4405
4226
4406 The following arguments are special:
4227 The following arguments are special:
4407
4228
4408 ``BODYFILE``
4229 ``BODYFILE``
4409 The content of the file defined as the value to this argument will be
4230 The content of the file defined as the value to this argument will be
4410 transferred verbatim as the HTTP request body.
4231 transferred verbatim as the HTTP request body.
4411
4232
4412 ``frame <type> <flags> <payload>``
4233 ``frame <type> <flags> <payload>``
4413 Send a unified protocol frame as part of the request body.
4234 Send a unified protocol frame as part of the request body.
4414
4235
4415 All frames will be collected and sent as the body to the HTTP
4236 All frames will be collected and sent as the body to the HTTP
4416 request.
4237 request.
4417
4238
4418 close
4239 close
4419 -----
4240 -----
4420
4241
4421 Close the connection to the server.
4242 Close the connection to the server.
4422
4243
4423 flush
4244 flush
4424 -----
4245 -----
4425
4246
4426 Flush data written to the server.
4247 Flush data written to the server.
4427
4248
4428 readavailable
4249 readavailable
4429 -------------
4250 -------------
4430
4251
4431 Close the write end of the connection and read all available data from
4252 Close the write end of the connection and read all available data from
4432 the server.
4253 the server.
4433
4254
4434 If the connection to the server encompasses multiple pipes, we poll both
4255 If the connection to the server encompasses multiple pipes, we poll both
4435 pipes and read available data.
4256 pipes and read available data.
4436
4257
4437 readline
4258 readline
4438 --------
4259 --------
4439
4260
4440 Read a line of output from the server. If there are multiple output
4261 Read a line of output from the server. If there are multiple output
4441 pipes, reads only the main pipe.
4262 pipes, reads only the main pipe.
4442
4263
4443 ereadline
4264 ereadline
4444 ---------
4265 ---------
4445
4266
4446 Like ``readline``, but read from the stderr pipe, if available.
4267 Like ``readline``, but read from the stderr pipe, if available.
4447
4268
4448 read <X>
4269 read <X>
4449 --------
4270 --------
4450
4271
4451 ``read()`` N bytes from the server's main output pipe.
4272 ``read()`` N bytes from the server's main output pipe.
4452
4273
4453 eread <X>
4274 eread <X>
4454 ---------
4275 ---------
4455
4276
4456 ``read()`` N bytes from the server's stderr pipe, if available.
4277 ``read()`` N bytes from the server's stderr pipe, if available.
4457
4278
4458 Specifying Unified Frame-Based Protocol Frames
4279 Specifying Unified Frame-Based Protocol Frames
4459 ----------------------------------------------
4280 ----------------------------------------------
4460
4281
4461 It is possible to emit a *Unified Frame-Based Protocol* by using special
4282 It is possible to emit a *Unified Frame-Based Protocol* by using special
4462 syntax.
4283 syntax.
4463
4284
4464 A frame is composed as a type, flags, and payload. These can be parsed
4285 A frame is composed as a type, flags, and payload. These can be parsed
4465 from a string of the form:
4286 from a string of the form:
4466
4287
4467 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4288 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4468
4289
4469 ``request-id`` and ``stream-id`` are integers defining the request and
4290 ``request-id`` and ``stream-id`` are integers defining the request and
4470 stream identifiers.
4291 stream identifiers.
4471
4292
4472 ``type`` can be an integer value for the frame type or the string name
4293 ``type`` can be an integer value for the frame type or the string name
4473 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4294 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4474 ``command-name``.
4295 ``command-name``.
4475
4296
4476 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4297 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4477 components. Each component (and there can be just one) can be an integer
4298 components. Each component (and there can be just one) can be an integer
4478 or a flag name for stream flags or frame flags, respectively. Values are
4299 or a flag name for stream flags or frame flags, respectively. Values are
4479 resolved to integers and then bitwise OR'd together.
4300 resolved to integers and then bitwise OR'd together.
4480
4301
4481 ``payload`` represents the raw frame payload. If it begins with
4302 ``payload`` represents the raw frame payload. If it begins with
4482 ``cbor:``, the following string is evaluated as Python code and the
4303 ``cbor:``, the following string is evaluated as Python code and the
4483 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4304 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4484 as a Python byte string literal.
4305 as a Python byte string literal.
4485 """
4306 """
4486 if opts['localssh'] and not repo:
4307 if opts['localssh'] and not repo:
4487 raise error.Abort(_(b'--localssh requires a repository'))
4308 raise error.Abort(_(b'--localssh requires a repository'))
4488
4309
4489 if opts['peer'] and opts['peer'] not in (
4310 if opts['peer'] and opts['peer'] not in (
4490 b'raw',
4311 b'raw',
4491 b'ssh1',
4312 b'ssh1',
4492 ):
4313 ):
4493 raise error.Abort(
4314 raise error.Abort(
4494 _(b'invalid value for --peer'),
4315 _(b'invalid value for --peer'),
4495 hint=_(b'valid values are "raw" and "ssh1"'),
4316 hint=_(b'valid values are "raw" and "ssh1"'),
4496 )
4317 )
4497
4318
4498 if path and opts['localssh']:
4319 if path and opts['localssh']:
4499 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4320 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4500
4321
4501 if ui.interactive():
4322 if ui.interactive():
4502 ui.write(_(b'(waiting for commands on stdin)\n'))
4323 ui.write(_(b'(waiting for commands on stdin)\n'))
4503
4324
4504 blocks = list(_parsewirelangblocks(ui.fin))
4325 blocks = list(_parsewirelangblocks(ui.fin))
4505
4326
4506 proc = None
4327 proc = None
4507 stdin = None
4328 stdin = None
4508 stdout = None
4329 stdout = None
4509 stderr = None
4330 stderr = None
4510 opener = None
4331 opener = None
4511
4332
4512 if opts['localssh']:
4333 if opts['localssh']:
4513 # We start the SSH server in its own process so there is process
4334 # We start the SSH server in its own process so there is process
4514 # separation. This prevents a whole class of potential bugs around
4335 # separation. This prevents a whole class of potential bugs around
4515 # shared state from interfering with server operation.
4336 # shared state from interfering with server operation.
4516 args = procutil.hgcmd() + [
4337 args = procutil.hgcmd() + [
4517 b'-R',
4338 b'-R',
4518 repo.root,
4339 repo.root,
4519 b'debugserve',
4340 b'debugserve',
4520 b'--sshstdio',
4341 b'--sshstdio',
4521 ]
4342 ]
4522 proc = subprocess.Popen(
4343 proc = subprocess.Popen(
4523 pycompat.rapply(procutil.tonativestr, args),
4344 pycompat.rapply(procutil.tonativestr, args),
4524 stdin=subprocess.PIPE,
4345 stdin=subprocess.PIPE,
4525 stdout=subprocess.PIPE,
4346 stdout=subprocess.PIPE,
4526 stderr=subprocess.PIPE,
4347 stderr=subprocess.PIPE,
4527 bufsize=0,
4348 bufsize=0,
4528 )
4349 )
4529
4350
4530 stdin = proc.stdin
4351 stdin = proc.stdin
4531 stdout = proc.stdout
4352 stdout = proc.stdout
4532 stderr = proc.stderr
4353 stderr = proc.stderr
4533
4354
4534 # We turn the pipes into observers so we can log I/O.
4355 # We turn the pipes into observers so we can log I/O.
4535 if ui.verbose or opts['peer'] == b'raw':
4356 if ui.verbose or opts['peer'] == b'raw':
4536 stdin = util.makeloggingfileobject(
4357 stdin = util.makeloggingfileobject(
4537 ui, proc.stdin, b'i', logdata=True
4358 ui, proc.stdin, b'i', logdata=True
4538 )
4359 )
4539 stdout = util.makeloggingfileobject(
4360 stdout = util.makeloggingfileobject(
4540 ui, proc.stdout, b'o', logdata=True
4361 ui, proc.stdout, b'o', logdata=True
4541 )
4362 )
4542 stderr = util.makeloggingfileobject(
4363 stderr = util.makeloggingfileobject(
4543 ui, proc.stderr, b'e', logdata=True
4364 ui, proc.stderr, b'e', logdata=True
4544 )
4365 )
4545
4366
4546 # --localssh also implies the peer connection settings.
4367 # --localssh also implies the peer connection settings.
4547
4368
4548 url = b'ssh://localserver'
4369 url = b'ssh://localserver'
4549 autoreadstderr = not opts['noreadstderr']
4370 autoreadstderr = not opts['noreadstderr']
4550
4371
4551 if opts['peer'] == b'ssh1':
4372 if opts['peer'] == b'ssh1':
4552 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4373 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4553 peer = sshpeer.sshv1peer(
4374 peer = sshpeer.sshv1peer(
4554 ui,
4375 ui,
4555 url,
4376 url,
4556 proc,
4377 proc,
4557 stdin,
4378 stdin,
4558 stdout,
4379 stdout,
4559 stderr,
4380 stderr,
4560 None,
4381 None,
4561 autoreadstderr=autoreadstderr,
4382 autoreadstderr=autoreadstderr,
4562 )
4383 )
4563 elif opts['peer'] == b'raw':
4384 elif opts['peer'] == b'raw':
4564 ui.write(_(b'using raw connection to peer\n'))
4385 ui.write(_(b'using raw connection to peer\n'))
4565 peer = None
4386 peer = None
4566 else:
4387 else:
4567 ui.write(_(b'creating ssh peer from handshake results\n'))
4388 ui.write(_(b'creating ssh peer from handshake results\n'))
4568 peer = sshpeer._make_peer(
4389 peer = sshpeer._make_peer(
4569 ui,
4390 ui,
4570 url,
4391 url,
4571 proc,
4392 proc,
4572 stdin,
4393 stdin,
4573 stdout,
4394 stdout,
4574 stderr,
4395 stderr,
4575 autoreadstderr=autoreadstderr,
4396 autoreadstderr=autoreadstderr,
4576 )
4397 )
4577
4398
4578 elif path:
4399 elif path:
4579 # We bypass hg.peer() so we can proxy the sockets.
4400 # We bypass hg.peer() so we can proxy the sockets.
4580 # TODO consider not doing this because we skip
4401 # TODO consider not doing this because we skip
4581 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4402 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4582 u = urlutil.url(path)
4403 u = urlutil.url(path)
4583 if u.scheme != b'http':
4404 if u.scheme != b'http':
4584 raise error.Abort(_(b'only http:// paths are currently supported'))
4405 raise error.Abort(_(b'only http:// paths are currently supported'))
4585
4406
4586 url, authinfo = u.authinfo()
4407 url, authinfo = u.authinfo()
4587 openerargs = {
4408 openerargs = {
4588 'useragent': b'Mercurial debugwireproto',
4409 'useragent': b'Mercurial debugwireproto',
4589 }
4410 }
4590
4411
4591 # Turn pipes/sockets into observers so we can log I/O.
4412 # Turn pipes/sockets into observers so we can log I/O.
4592 if ui.verbose:
4413 if ui.verbose:
4593 openerargs.update(
4414 openerargs.update(
4594 {
4415 {
4595 'loggingfh': ui,
4416 'loggingfh': ui,
4596 'loggingname': b's',
4417 'loggingname': b's',
4597 'loggingopts': {
4418 'loggingopts': {
4598 'logdata': True,
4419 'logdata': True,
4599 'logdataapis': False,
4420 'logdataapis': False,
4600 },
4421 },
4601 }
4422 }
4602 )
4423 )
4603
4424
4604 if ui.debugflag:
4425 if ui.debugflag:
4605 openerargs['loggingopts']['logdataapis'] = True
4426 openerargs['loggingopts']['logdataapis'] = True
4606
4427
4607 # Don't send default headers when in raw mode. This allows us to
4428 # Don't send default headers when in raw mode. This allows us to
4608 # bypass most of the behavior of our URL handling code so we can
4429 # bypass most of the behavior of our URL handling code so we can
4609 # have near complete control over what's sent on the wire.
4430 # have near complete control over what's sent on the wire.
4610 if opts['peer'] == b'raw':
4431 if opts['peer'] == b'raw':
4611 openerargs['sendaccept'] = False
4432 openerargs['sendaccept'] = False
4612
4433
4613 opener = urlmod.opener(ui, authinfo, **openerargs)
4434 opener = urlmod.opener(ui, authinfo, **openerargs)
4614
4435
4615 if opts['peer'] == b'raw':
4436 if opts['peer'] == b'raw':
4616 ui.write(_(b'using raw connection to peer\n'))
4437 ui.write(_(b'using raw connection to peer\n'))
4617 peer = None
4438 peer = None
4618 elif opts['peer']:
4439 elif opts['peer']:
4619 raise error.Abort(
4440 raise error.Abort(
4620 _(b'--peer %s not supported with HTTP peers') % opts['peer']
4441 _(b'--peer %s not supported with HTTP peers') % opts['peer']
4621 )
4442 )
4622 else:
4443 else:
4623 peer_path = urlutil.try_path(ui, path)
4444 peer_path = urlutil.try_path(ui, path)
4624 peer = httppeer._make_peer(ui, peer_path, opener=opener)
4445 peer = httppeer._make_peer(ui, peer_path, opener=opener)
4625
4446
4626 # We /could/ populate stdin/stdout with sock.makefile()...
4447 # We /could/ populate stdin/stdout with sock.makefile()...
4627 else:
4448 else:
4628 raise error.Abort(_(b'unsupported connection configuration'))
4449 raise error.Abort(_(b'unsupported connection configuration'))
4629
4450
4630 batchedcommands = None
4451 batchedcommands = None
4631
4452
4632 # Now perform actions based on the parsed wire language instructions.
4453 # Now perform actions based on the parsed wire language instructions.
4633 for action, lines in blocks:
4454 for action, lines in blocks:
4634 if action in (b'raw', b'raw+'):
4455 if action in (b'raw', b'raw+'):
4635 if not stdin:
4456 if not stdin:
4636 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4457 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4637
4458
4638 # Concatenate the data together.
4459 # Concatenate the data together.
4639 data = b''.join(l.lstrip() for l in lines)
4460 data = b''.join(l.lstrip() for l in lines)
4640 data = stringutil.unescapestr(data)
4461 data = stringutil.unescapestr(data)
4641 stdin.write(data)
4462 stdin.write(data)
4642
4463
4643 if action == b'raw+':
4464 if action == b'raw+':
4644 stdin.flush()
4465 stdin.flush()
4645 elif action == b'flush':
4466 elif action == b'flush':
4646 if not stdin:
4467 if not stdin:
4647 raise error.Abort(_(b'cannot call flush on this peer'))
4468 raise error.Abort(_(b'cannot call flush on this peer'))
4648 stdin.flush()
4469 stdin.flush()
4649 elif action.startswith(b'command'):
4470 elif action.startswith(b'command'):
4650 if not peer:
4471 if not peer:
4651 raise error.Abort(
4472 raise error.Abort(
4652 _(
4473 _(
4653 b'cannot send commands unless peer instance '
4474 b'cannot send commands unless peer instance '
4654 b'is available'
4475 b'is available'
4655 )
4476 )
4656 )
4477 )
4657
4478
4658 command = action.split(b' ', 1)[1]
4479 command = action.split(b' ', 1)[1]
4659
4480
4660 args = {}
4481 args = {}
4661 for line in lines:
4482 for line in lines:
4662 # We need to allow empty values.
4483 # We need to allow empty values.
4663 fields = line.lstrip().split(b' ', 1)
4484 fields = line.lstrip().split(b' ', 1)
4664 if len(fields) == 1:
4485 if len(fields) == 1:
4665 key = fields[0]
4486 key = fields[0]
4666 value = b''
4487 value = b''
4667 else:
4488 else:
4668 key, value = fields
4489 key, value = fields
4669
4490
4670 if value.startswith(b'eval:'):
4491 if value.startswith(b'eval:'):
4671 value = stringutil.evalpythonliteral(value[5:])
4492 value = stringutil.evalpythonliteral(value[5:])
4672 else:
4493 else:
4673 value = stringutil.unescapestr(value)
4494 value = stringutil.unescapestr(value)
4674
4495
4675 args[key] = value
4496 args[key] = value
4676
4497
4677 if batchedcommands is not None:
4498 if batchedcommands is not None:
4678 batchedcommands.append((command, args))
4499 batchedcommands.append((command, args))
4679 continue
4500 continue
4680
4501
4681 ui.status(_(b'sending %s command\n') % command)
4502 ui.status(_(b'sending %s command\n') % command)
4682
4503
4683 if b'PUSHFILE' in args:
4504 if b'PUSHFILE' in args:
4684 with open(args[b'PUSHFILE'], 'rb') as fh:
4505 with open(args[b'PUSHFILE'], 'rb') as fh:
4685 del args[b'PUSHFILE']
4506 del args[b'PUSHFILE']
4686 res, output = peer._callpush(
4507 res, output = peer._callpush(
4687 command, fh, **pycompat.strkwargs(args)
4508 command, fh, **pycompat.strkwargs(args)
4688 )
4509 )
4689 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4510 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4690 ui.status(
4511 ui.status(
4691 _(b'remote output: %s\n') % stringutil.escapestr(output)
4512 _(b'remote output: %s\n') % stringutil.escapestr(output)
4692 )
4513 )
4693 else:
4514 else:
4694 with peer.commandexecutor() as e:
4515 with peer.commandexecutor() as e:
4695 res = e.callcommand(command, args).result()
4516 res = e.callcommand(command, args).result()
4696
4517
4697 ui.status(
4518 ui.status(
4698 _(b'response: %s\n')
4519 _(b'response: %s\n')
4699 % stringutil.pprint(res, bprefix=True, indent=2)
4520 % stringutil.pprint(res, bprefix=True, indent=2)
4700 )
4521 )
4701
4522
4702 elif action == b'batchbegin':
4523 elif action == b'batchbegin':
4703 if batchedcommands is not None:
4524 if batchedcommands is not None:
4704 raise error.Abort(_(b'nested batchbegin not allowed'))
4525 raise error.Abort(_(b'nested batchbegin not allowed'))
4705
4526
4706 batchedcommands = []
4527 batchedcommands = []
4707 elif action == b'batchsubmit':
4528 elif action == b'batchsubmit':
4708 # There is a batching API we could go through. But it would be
4529 # There is a batching API we could go through. But it would be
4709 # difficult to normalize requests into function calls. It is easier
4530 # difficult to normalize requests into function calls. It is easier
4710 # to bypass this layer and normalize to commands + args.
4531 # to bypass this layer and normalize to commands + args.
4711 ui.status(
4532 ui.status(
4712 _(b'sending batch with %d sub-commands\n')
4533 _(b'sending batch with %d sub-commands\n')
4713 % len(batchedcommands)
4534 % len(batchedcommands)
4714 )
4535 )
4715 assert peer is not None
4536 assert peer is not None
4716 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4537 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4717 ui.status(
4538 ui.status(
4718 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4539 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4719 )
4540 )
4720
4541
4721 batchedcommands = None
4542 batchedcommands = None
4722
4543
4723 elif action.startswith(b'httprequest '):
4544 elif action.startswith(b'httprequest '):
4724 if not opener:
4545 if not opener:
4725 raise error.Abort(
4546 raise error.Abort(
4726 _(b'cannot use httprequest without an HTTP peer')
4547 _(b'cannot use httprequest without an HTTP peer')
4727 )
4548 )
4728
4549
4729 request = action.split(b' ', 2)
4550 request = action.split(b' ', 2)
4730 if len(request) != 3:
4551 if len(request) != 3:
4731 raise error.Abort(
4552 raise error.Abort(
4732 _(
4553 _(
4733 b'invalid httprequest: expected format is '
4554 b'invalid httprequest: expected format is '
4734 b'"httprequest <method> <path>'
4555 b'"httprequest <method> <path>'
4735 )
4556 )
4736 )
4557 )
4737
4558
4738 method, httppath = request[1:]
4559 method, httppath = request[1:]
4739 headers = {}
4560 headers = {}
4740 body = None
4561 body = None
4741 frames = []
4562 frames = []
4742 for line in lines:
4563 for line in lines:
4743 line = line.lstrip()
4564 line = line.lstrip()
4744 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4565 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4745 if m:
4566 if m:
4746 # Headers need to use native strings.
4567 # Headers need to use native strings.
4747 key = pycompat.strurl(m.group(1))
4568 key = pycompat.strurl(m.group(1))
4748 value = pycompat.strurl(m.group(2))
4569 value = pycompat.strurl(m.group(2))
4749 headers[key] = value
4570 headers[key] = value
4750 continue
4571 continue
4751
4572
4752 if line.startswith(b'BODYFILE '):
4573 if line.startswith(b'BODYFILE '):
4753 with open(line.split(b' ', 1), b'rb') as fh:
4574 with open(line.split(b' ', 1), b'rb') as fh:
4754 body = fh.read()
4575 body = fh.read()
4755 elif line.startswith(b'frame '):
4576 elif line.startswith(b'frame '):
4756 frame = wireprotoframing.makeframefromhumanstring(
4577 frame = wireprotoframing.makeframefromhumanstring(
4757 line[len(b'frame ') :]
4578 line[len(b'frame ') :]
4758 )
4579 )
4759
4580
4760 frames.append(frame)
4581 frames.append(frame)
4761 else:
4582 else:
4762 raise error.Abort(
4583 raise error.Abort(
4763 _(b'unknown argument to httprequest: %s') % line
4584 _(b'unknown argument to httprequest: %s') % line
4764 )
4585 )
4765
4586
4766 url = path + httppath
4587 url = path + httppath
4767
4588
4768 if frames:
4589 if frames:
4769 body = b''.join(bytes(f) for f in frames)
4590 body = b''.join(bytes(f) for f in frames)
4770
4591
4771 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4592 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4772
4593
4773 # urllib.Request insists on using has_data() as a proxy for
4594 # urllib.Request insists on using has_data() as a proxy for
4774 # determining the request method. Override that to use our
4595 # determining the request method. Override that to use our
4775 # explicitly requested method.
4596 # explicitly requested method.
4776 req.get_method = lambda: pycompat.sysstr(method)
4597 req.get_method = lambda: pycompat.sysstr(method)
4777
4598
4778 try:
4599 try:
4779 res = opener.open(req)
4600 res = opener.open(req)
4780 body = res.read()
4601 body = res.read()
4781 except util.urlerr.urlerror as e:
4602 except util.urlerr.urlerror as e:
4782 # read() method must be called, but only exists in Python 2
4603 # read() method must be called, but only exists in Python 2
4783 getattr(e, 'read', lambda: None)()
4604 getattr(e, 'read', lambda: None)()
4784 continue
4605 continue
4785
4606
4786 ct = res.headers.get('Content-Type')
4607 ct = res.headers.get('Content-Type')
4787 if ct == 'application/mercurial-cbor':
4608 if ct == 'application/mercurial-cbor':
4788 ui.write(
4609 ui.write(
4789 _(b'cbor> %s\n')
4610 _(b'cbor> %s\n')
4790 % stringutil.pprint(
4611 % stringutil.pprint(
4791 cborutil.decodeall(body), bprefix=True, indent=2
4612 cborutil.decodeall(body), bprefix=True, indent=2
4792 )
4613 )
4793 )
4614 )
4794
4615
4795 elif action == b'close':
4616 elif action == b'close':
4796 assert peer is not None
4617 assert peer is not None
4797 peer.close()
4618 peer.close()
4798 elif action == b'readavailable':
4619 elif action == b'readavailable':
4799 if not stdout or not stderr:
4620 if not stdout or not stderr:
4800 raise error.Abort(
4621 raise error.Abort(
4801 _(b'readavailable not available on this peer')
4622 _(b'readavailable not available on this peer')
4802 )
4623 )
4803
4624
4804 stdin.close()
4625 stdin.close()
4805 stdout.read()
4626 stdout.read()
4806 stderr.read()
4627 stderr.read()
4807
4628
4808 elif action == b'readline':
4629 elif action == b'readline':
4809 if not stdout:
4630 if not stdout:
4810 raise error.Abort(_(b'readline not available on this peer'))
4631 raise error.Abort(_(b'readline not available on this peer'))
4811 stdout.readline()
4632 stdout.readline()
4812 elif action == b'ereadline':
4633 elif action == b'ereadline':
4813 if not stderr:
4634 if not stderr:
4814 raise error.Abort(_(b'ereadline not available on this peer'))
4635 raise error.Abort(_(b'ereadline not available on this peer'))
4815 stderr.readline()
4636 stderr.readline()
4816 elif action.startswith(b'read '):
4637 elif action.startswith(b'read '):
4817 count = int(action.split(b' ', 1)[1])
4638 count = int(action.split(b' ', 1)[1])
4818 if not stdout:
4639 if not stdout:
4819 raise error.Abort(_(b'read not available on this peer'))
4640 raise error.Abort(_(b'read not available on this peer'))
4820 stdout.read(count)
4641 stdout.read(count)
4821 elif action.startswith(b'eread '):
4642 elif action.startswith(b'eread '):
4822 count = int(action.split(b' ', 1)[1])
4643 count = int(action.split(b' ', 1)[1])
4823 if not stderr:
4644 if not stderr:
4824 raise error.Abort(_(b'eread not available on this peer'))
4645 raise error.Abort(_(b'eread not available on this peer'))
4825 stderr.read(count)
4646 stderr.read(count)
4826 else:
4647 else:
4827 raise error.Abort(_(b'unknown action: %s') % action)
4648 raise error.Abort(_(b'unknown action: %s') % action)
4828
4649
4829 if batchedcommands is not None:
4650 if batchedcommands is not None:
4830 raise error.Abort(_(b'unclosed "batchbegin" request'))
4651 raise error.Abort(_(b'unclosed "batchbegin" request'))
4831
4652
4832 if peer:
4653 if peer:
4833 peer.close()
4654 peer.close()
4834
4655
4835 if proc:
4656 if proc:
4836 proc.kill()
4657 proc.kill()
@@ -1,712 +1,884 b''
1 # revlogutils/debug.py - utility used for revlog debuging
1 # revlogutils/debug.py - utility used for revlog debuging
2 #
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 # Copyright 2022 Octobus <contact@octobus.net>
4 # Copyright 2022 Octobus <contact@octobus.net>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 import collections
9 import collections
10 import string
10 import string
11
11
12 from .. import (
12 from .. import (
13 mdiff,
13 mdiff,
14 node as nodemod,
14 node as nodemod,
15 revlogutils,
15 revlogutils,
16 )
16 )
17
17
18 from . import (
18 from . import (
19 constants,
19 constants,
20 deltas as deltautil,
20 deltas as deltautil,
21 )
21 )
22
22
23 INDEX_ENTRY_DEBUG_COLUMN = []
23 INDEX_ENTRY_DEBUG_COLUMN = []
24
24
25 NODE_SIZE = object()
25 NODE_SIZE = object()
26
26
27
27
28 class _column_base:
28 class _column_base:
29 """constains the definition of a revlog column
29 """constains the definition of a revlog column
30
30
31 name: the column header,
31 name: the column header,
32 value_func: the function called to get a value,
32 value_func: the function called to get a value,
33 size: the width of the column,
33 size: the width of the column,
34 verbose_only: only include the column in verbose mode.
34 verbose_only: only include the column in verbose mode.
35 """
35 """
36
36
37 def __init__(self, name, value_func, size=None, verbose=False):
37 def __init__(self, name, value_func, size=None, verbose=False):
38 self.name = name
38 self.name = name
39 self.value_func = value_func
39 self.value_func = value_func
40 if size is not NODE_SIZE:
40 if size is not NODE_SIZE:
41 if size is None:
41 if size is None:
42 size = 8 # arbitrary default
42 size = 8 # arbitrary default
43 size = max(len(name), size)
43 size = max(len(name), size)
44 self._size = size
44 self._size = size
45 self.verbose_only = verbose
45 self.verbose_only = verbose
46
46
47 def get_size(self, node_size):
47 def get_size(self, node_size):
48 if self._size is NODE_SIZE:
48 if self._size is NODE_SIZE:
49 return node_size
49 return node_size
50 else:
50 else:
51 return self._size
51 return self._size
52
52
53
53
54 def debug_column(name, size=None, verbose=False):
54 def debug_column(name, size=None, verbose=False):
55 """decorated function is registered as a column
55 """decorated function is registered as a column
56
56
57 name: the name of the column,
57 name: the name of the column,
58 size: the expected size of the column.
58 size: the expected size of the column.
59 """
59 """
60
60
61 def register(func):
61 def register(func):
62 entry = _column_base(
62 entry = _column_base(
63 name=name,
63 name=name,
64 value_func=func,
64 value_func=func,
65 size=size,
65 size=size,
66 verbose=verbose,
66 verbose=verbose,
67 )
67 )
68 INDEX_ENTRY_DEBUG_COLUMN.append(entry)
68 INDEX_ENTRY_DEBUG_COLUMN.append(entry)
69 return entry
69 return entry
70
70
71 return register
71 return register
72
72
73
73
74 @debug_column(b"rev", size=6)
74 @debug_column(b"rev", size=6)
75 def _rev(index, rev, entry, hexfn):
75 def _rev(index, rev, entry, hexfn):
76 return b"%d" % rev
76 return b"%d" % rev
77
77
78
78
79 @debug_column(b"rank", size=6, verbose=True)
79 @debug_column(b"rank", size=6, verbose=True)
80 def rank(index, rev, entry, hexfn):
80 def rank(index, rev, entry, hexfn):
81 return b"%d" % entry[constants.ENTRY_RANK]
81 return b"%d" % entry[constants.ENTRY_RANK]
82
82
83
83
84 @debug_column(b"linkrev", size=6)
84 @debug_column(b"linkrev", size=6)
85 def _linkrev(index, rev, entry, hexfn):
85 def _linkrev(index, rev, entry, hexfn):
86 return b"%d" % entry[constants.ENTRY_LINK_REV]
86 return b"%d" % entry[constants.ENTRY_LINK_REV]
87
87
88
88
89 @debug_column(b"nodeid", size=NODE_SIZE)
89 @debug_column(b"nodeid", size=NODE_SIZE)
90 def _nodeid(index, rev, entry, hexfn):
90 def _nodeid(index, rev, entry, hexfn):
91 return hexfn(entry[constants.ENTRY_NODE_ID])
91 return hexfn(entry[constants.ENTRY_NODE_ID])
92
92
93
93
94 @debug_column(b"p1-rev", size=6, verbose=True)
94 @debug_column(b"p1-rev", size=6, verbose=True)
95 def _p1_rev(index, rev, entry, hexfn):
95 def _p1_rev(index, rev, entry, hexfn):
96 return b"%d" % entry[constants.ENTRY_PARENT_1]
96 return b"%d" % entry[constants.ENTRY_PARENT_1]
97
97
98
98
99 @debug_column(b"p1-nodeid", size=NODE_SIZE)
99 @debug_column(b"p1-nodeid", size=NODE_SIZE)
100 def _p1_node(index, rev, entry, hexfn):
100 def _p1_node(index, rev, entry, hexfn):
101 parent = entry[constants.ENTRY_PARENT_1]
101 parent = entry[constants.ENTRY_PARENT_1]
102 p_entry = index[parent]
102 p_entry = index[parent]
103 return hexfn(p_entry[constants.ENTRY_NODE_ID])
103 return hexfn(p_entry[constants.ENTRY_NODE_ID])
104
104
105
105
106 @debug_column(b"p2-rev", size=6, verbose=True)
106 @debug_column(b"p2-rev", size=6, verbose=True)
107 def _p2_rev(index, rev, entry, hexfn):
107 def _p2_rev(index, rev, entry, hexfn):
108 return b"%d" % entry[constants.ENTRY_PARENT_2]
108 return b"%d" % entry[constants.ENTRY_PARENT_2]
109
109
110
110
111 @debug_column(b"p2-nodeid", size=NODE_SIZE)
111 @debug_column(b"p2-nodeid", size=NODE_SIZE)
112 def _p2_node(index, rev, entry, hexfn):
112 def _p2_node(index, rev, entry, hexfn):
113 parent = entry[constants.ENTRY_PARENT_2]
113 parent = entry[constants.ENTRY_PARENT_2]
114 p_entry = index[parent]
114 p_entry = index[parent]
115 return hexfn(p_entry[constants.ENTRY_NODE_ID])
115 return hexfn(p_entry[constants.ENTRY_NODE_ID])
116
116
117
117
118 @debug_column(b"full-size", size=20, verbose=True)
118 @debug_column(b"full-size", size=20, verbose=True)
119 def full_size(index, rev, entry, hexfn):
119 def full_size(index, rev, entry, hexfn):
120 return b"%d" % entry[constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]
120 return b"%d" % entry[constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]
121
121
122
122
@debug_column(b"delta-base", size=6, verbose=True)
def delta_base(index, rev, entry, hexfn):
    """revision number this entry's delta is computed against"""
    base = entry[constants.ENTRY_DELTA_BASE]
    return b"%d" % base
126
126
127
127
@debug_column(b"flags", size=2, verbose=True)
def flags(index, rev, entry, hexfn):
    """revision flags: the low 16 bits of the offset/flags field"""
    offset_flags = entry[constants.ENTRY_DATA_OFFSET]
    return b"%d" % (offset_flags & 0xFFFF)
133
133
134
134
@debug_column(b"comp-mode", size=4, verbose=True)
def compression_mode(index, rev, entry, hexfn):
    """numeric compression mode of the revision data"""
    mode = entry[constants.ENTRY_DATA_COMPRESSION_MODE]
    return b"%d" % mode
138
138
139
139
@debug_column(b"data-offset", size=20, verbose=True)
def data_offset(index, rev, entry, hexfn):
    """byte offset of the revision data: the high bits of offset/flags"""
    offset_flags = entry[constants.ENTRY_DATA_OFFSET]
    return b"%d" % (offset_flags >> 16)
145
145
146
146
@debug_column(b"chunk-size", size=10, verbose=True)
def data_chunk_size(index, rev, entry, hexfn):
    """on-disk (compressed) size of the revision's data chunk"""
    chunk_size = entry[constants.ENTRY_DATA_COMPRESSED_LENGTH]
    return b"%d" % chunk_size
150
150
151
151
@debug_column(b"sd-comp-mode", size=7, verbose=True)
def sidedata_compression_mode(index, rev, entry, hexfn):
    """symbolic name of the sidedata compression mode (number if unknown)"""
    compression = entry[constants.ENTRY_SIDEDATA_COMPRESSION_MODE]
    known_modes = {
        constants.COMP_MODE_PLAIN: b"plain",
        constants.COMP_MODE_DEFAULT: b"default",
        constants.COMP_MODE_INLINE: b"inline",
    }
    if compression in known_modes:
        return known_modes[compression]
    # fall back to the raw numeric value for unrecognized modes
    return b"%d" % compression
163
163
164
164
@debug_column(b"sidedata-offset", size=20, verbose=True)
def sidedata_offset(index, rev, entry, hexfn):
    """byte offset of the revision's sidedata chunk"""
    offset = entry[constants.ENTRY_SIDEDATA_OFFSET]
    return b"%d" % offset
168
168
169
169
@debug_column(b"sd-chunk-size", size=10, verbose=True)
def sidedata_chunk_size(index, rev, entry, hexfn):
    """on-disk (compressed) size of the revision's sidedata chunk"""
    chunk_size = entry[constants.ENTRY_SIDEDATA_COMPRESSED_LENGTH]
    return b"%d" % chunk_size
173
173
174
174
def debug_index(
    ui,
    repo,
    formatter,
    revlog,
    full_node,
):
    """display index data for a revlog

    One row is emitted per revision; each registered debug column renders
    one cell. Verbose-only columns are skipped unless `ui` is verbose.
    `full_node` selects full node ids instead of the short form.
    """
    hexfn = nodemod.hex if full_node else nodemod.short

    # width of a rendered node id; probe the first revision, default to
    # the short-hash width when the revlog is empty
    idlen = 12
    for probe in revlog:
        idlen = len(hexfn(revlog.node(probe)))
        break

    fm = formatter

    # resolve once which columns are visible for this invocation
    columns = [
        c for c in INDEX_ENTRY_DEBUG_COLUMN if ui.verbose or not c.verbose_only
    ]

    header = [c.name.rjust(c.get_size(idlen)) for c in columns]
    fm.plain(b' '.join(header) + b'\n')

    index = revlog.index

    for rev in revlog:
        fm.startitem()
        entry = index[rev]
        for pos, column in enumerate(columns):
            if pos:
                # single-space separator between cells
                fm.plain(b' ')
            width = column.get_size(idlen)
            value = column.value_func(index, rev, entry, hexfn)
            fm.write(column.name, b"%%%ds" % width, value)
        fm.plain(b'\n')

    fm.end()
225
225
226
226
def dump(ui, revlog):
    """perform the work for `hg debugrevlog --dump"""
    # XXX seems redundant with debug index ?
    r = revlog
    numrevs = len(r)
    ui.write(
        (
            b"# rev p1rev p2rev start end deltastart base p1 p2"
            b" rawsize totalsize compression heads chainlen\n"
        )
    )
    # running total of raw (uncompressed) sizes seen so far
    running_total = 0
    # current set of head revisions among [0, rev]
    heads = set()

    for rev in range(numrevs):
        delta_parent = r.deltaparent(rev)
        if delta_parent == -1:
            # full revision: its delta "starts" at itself
            delta_parent = rev
        chain_base = r.chainbase(rev)
        chain_len = r.chainlen(rev)
        p1, p2 = r.parentrevs(rev)
        raw_size = r.rawsize(rev)
        running_total += raw_size
        # a revision replaces its parents as heads
        heads -= set(r.parentrevs(rev))
        heads.add(rev)
        # cumulative raw size over cumulative stored size
        stored_end = r.end(rev)
        if stored_end:
            compression = running_total / stored_end
        else:
            compression = 0
        ui.write(
            b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
            b"%11d %5d %8d\n"
            % (
                rev,
                p1,
                p2,
                r.start(rev),
                r.end(rev),
                r.start(delta_parent),
                r.start(chain_base),
                r.start(p1),
                r.start(p2),
                raw_size,
                running_total,
                compression,
                len(heads),
                chain_len,
            )
        )
276
276
277
277
def debug_revlog(ui, revlog):
    """code for `hg debugrevlog`

    Walk every revision of `revlog`, gather statistics about delta bases,
    snapshot levels, chain shapes and compression, then print a human
    readable report on `ui`.
    """
    r = revlog
    format_version = r._format_version
    v = r._format_flags
    flags = []
    gdelta = False
    if v & constants.FLAG_INLINE_DATA:
        flags.append(b'inline')
    if v & constants.FLAG_GENERALDELTA:
        gdelta = True
        flags.append(b'generaldelta')
    if not flags:
        flags = [b'(none)']

    ### the total size of stored content if incompressed.
    full_text_total_size = 0
    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are build
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # number of snapshots with a non-ancestor delta
    numsnapdepth_nad = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against prev, where prev is a non-ancestor
    numprev_nad = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against other that is a non-ancestor
    numother_nad = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision: [min, max, total] per category
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # fold `size` into the [min, max, total] accumulator `l`
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    with r.reading():
        numrevs = len(r)
        for rev in range(numrevs):
            p1, p2 = r.parentrevs(rev)
            delta = r.deltaparent(rev)
            if format_version > 0:
                s = r.rawsize(rev)
                full_text_total_size += s
                addsize(s, datasize)
            if p2 != nodemod.nullrev:
                nummerges += 1
            size = r.length(rev)
            if delta == nodemod.nullrev:
                # full text (level-0 snapshot): starts a new chain
                chainlengths.append(0)
                chainbases.append(r.start(rev))
                chainspans.append(size)
                if size == 0:
                    numempty += 1
                    numemptytext += 1
                else:
                    numfull += 1
                    numsnapdepth[0] += 1
                    addsize(size, fullsize)
                    addsize(size, snapsizedepth[0])
            else:
                # nad: the delta base is neither a parent nor an ancestor
                nad = (
                    delta != p1
                    and delta != p2
                    and not r.isancestorrev(delta, rev)
                )
                chainlengths.append(chainlengths[delta] + 1)
                baseaddr = chainbases[delta]
                revaddr = r.start(rev)
                chainbases.append(baseaddr)
                chainspans.append((revaddr - baseaddr) + size)
                if size == 0:
                    numempty += 1
                    numemptydelta += 1
                elif r.issnapshot(rev):
                    addsize(size, semisize)
                    numsemi += 1
                    depth = r.snapshotdepth(rev)
                    numsnapdepth[depth] += 1
                    if nad:
                        numsnapdepth_nad[depth] += 1
                    addsize(size, snapsizedepth[depth])
                else:
                    addsize(size, deltasize)
                    if delta == rev - 1:
                        numprev += 1
                        if delta == p1:
                            nump1prev += 1
                        elif delta == p2:
                            nump2prev += 1
                        elif nad:
                            numprev_nad += 1
                    elif delta == p1:
                        nump1 += 1
                    elif delta == p2:
                        nump2 += 1
                    elif delta != nodemod.nullrev:
                        numother += 1
                        # Only count this as a non-ancestor base when it
                        # really is one; bumping `numother_nad`
                        # unconditionally (as the previous code did) made
                        # `num_other_ancestors` always zero below.
                        if nad:
                            numother_nad += 1

            # Obtain data on the raw chunks in the revlog.
            if hasattr(r, '_getsegmentforrevs'):
                segment = r._getsegmentforrevs(rev, rev)[1]
            else:
                segment = r._revlog._getsegmentforrevs(rev, rev)[1]
            if segment:
                chunktype = bytes(segment[0:1])
            else:
                chunktype = b'empty'

            if chunktype not in chunktypecounts:
                chunktypecounts[chunktype] = 0
                chunktypesizes[chunktype] = 0

            chunktypecounts[chunktype] += 1
            chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull - numempty - numsemi
    # deltas against prev where prev is an ancestor but not a parent
    numoprev = numprev - nump1prev - nump2prev - numprev_nad
    num_other_ancestors = numother - numother_nad
    totalrawsize = datasize[2]
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    if numfull == 0:
        fullsize[2] = 0
    else:
        fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if numsemi > 0:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    basedfmtstr = b'%%%dd\n'
    basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        # plain decimal format sized to fit `max`
        return basedfmtstr % len(str(max))

    def pcfmtstr(max, padding=0):
        # decimal + percentage format sized to fit `max`
        return basepcfmtstr % (len(str(max)), b' ' * padding)

    def pcfmt(value, total):
        # (value, percentage-of-total) pair for the pcfmtstr formats
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.writenoi18n(b'format : %d\n' % format_version)
    ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))

    ui.write(b'\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
    ui.writenoi18n(
        b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
    )
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
    ui.writenoi18n(
        b' text : '
        + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' delta : '
        + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
    )
    for depth in sorted(numsnapdepth):
        base = b' lvl-%-3d : ' % depth
        count = fmt % pcfmt(numsnapdepth[depth], numrevs)
        pieces = [base, count]
        if numsnapdepth_nad[depth]:
            pieces[-1] = count = count[:-1]  # drop the final '\n'
            more = b' non-ancestor-bases: '
            anc_count = fmt
            anc_count %= pcfmt(numsnapdepth_nad[depth], numsnapdepth[depth])
            pieces.append(more)
            pieces.append(anc_count)
        ui.write(b''.join(pieces))
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
    ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(snaptotal[depth], totalsize)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))

    letters = string.ascii_letters.encode('ascii')

    def fmtchunktype(chunktype):
        # label for a chunk-type byte: printable letters shown verbatim
        if chunktype == b'empty':
            return b' %s : ' % chunktype
        elif chunktype in letters:
            return b' 0x%s (%s) : ' % (nodemod.hex(chunktype), chunktype)
        else:
            return b' 0x%s : ' % nodemod.hex(chunktype)

    ui.write(b'\n')
    ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write(b'\n')
    # render the total with spaces as thousands separators
    b_total = b"%d" % full_text_total_size
    p_total = []
    while len(b_total) > 3:
        p_total.append(b_total[-3:])
        b_total = b_total[:-3]
    p_total.append(b_total)
    p_total.reverse()
    b_total = b' '.join(p_total)

    ui.write(b'\n')
    ui.writenoi18n(b'total-stored-content: %s bytes\n' % b_total)
    ui.write(b'\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
    ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
    ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
    ui.writenoi18n(b'compression ratio : ' + fmt % compratio)

    if format_version > 0:
        ui.write(b'\n')
        ui.writenoi18n(
            b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
            % tuple(datasize)
        )
        ui.writenoi18n(
            b'full revision size (min/max/avg) : %d / %d / %d\n'
            % tuple(fullsize)
        )
        ui.writenoi18n(
            b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
            % tuple(semisize)
        )
        for depth in sorted(snapsizedepth):
            if depth == 0:
                continue
            ui.writenoi18n(
                b' level-%-3d (min/max/avg) : %d / %d / %d\n'
                % ((depth,) + tuple(snapsizedepth[depth]))
            )
        ui.writenoi18n(
            b'delta size (min/max/avg) : %d / %d / %d\n'
            % tuple(deltasize)
        )

    if numdeltas > 0:
        ui.write(b'\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.writenoi18n(
            b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
        )
        if numprev > 0:
            ui.writenoi18n(
                b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
            )
            ui.writenoi18n(
                b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
            )
            ui.writenoi18n(
                b' other-ancestor : ' + fmt2 % pcfmt(numoprev, numprev)
            )
            # `numprev_nad` counts deltas against a prev that is not an
            # ancestor at all; the previous code re-printed `numoprev`
            # here, leaving `numprev_nad` computed but never displayed.
            ui.writenoi18n(
                b' unrelated : ' + fmt2 % pcfmt(numprev_nad, numprev)
            )
        if gdelta:
            ui.writenoi18n(
                b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against ancs : '
                + fmt % pcfmt(num_other_ancestors, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against other : '
                + fmt % pcfmt(numother_nad, numdeltas)
            )
627
627
628
628
def debug_delta_find(ui, revlog, rev, base_rev=nodemod.nullrev):
    """display the search process for a delta"""
    # the delta computer reports each search step through `write_debug`
    deltacomputer = deltautil.deltacomputer(
        revlog,
        write_debug=ui.write,
        debug_search=not ui.quiet,
    )

    node = revlog.node(rev)
    p1r, p2r = revlog.parentrevs(rev)
    p1 = revlog.node(p1r)
    p2 = revlog.node(p2r)
    full_text = revlog.revision(rev)
    textlen = len(full_text)
    flags = revlog.flags(rev)

    if base_rev == nodemod.nullrev:
        # no candidate base: hand the full text over directly
        btext = [full_text]
        cachedelta = None
    else:
        # seed the search with a delta against the requested base
        base_text = revlog.revision(base_rev)
        delta = mdiff.textdiff(base_text, full_text)
        cachedelta = (base_rev, delta, constants.DELTA_BASE_REUSE_TRY)
        btext = [None]

    revinfo = revlogutils.revisioninfo(
        node,
        p1,
        p2,
        btext,
        textlen,
        cachedelta,
        flags,
    )

    fh = revlog._datafp()
    deltacomputer.finddeltainfo(revinfo, fh, target_rev=rev)
666
666
667
667
def debug_revlog_stats(
    repo, fm, changelog: bool, manifest: bool, filelogs: bool
):
    """Format revlog statistics for debugging purposes

    fm: the output formatter.

    The boolean flags select which revlog categories (changelog,
    manifest, filelogs) are included in the report.
    """
    fm.plain(b'rev-count data-size inl type target \n')

    revlog_entries = [e for e in repo.store.walk() if e.is_revlog]
    revlog_entries.sort(key=lambda e: (e.revlog_type, e.target_id))

    for entry in revlog_entries:
        # honor the caller's category selection
        if entry.is_changelog and not changelog:
            continue
        if entry.is_manifestlog and not manifest:
            continue
        if entry.is_filelog and not filelogs:
            continue
        rlog = entry.get_revlog_instance(repo).get_revlog()
        fm.startitem()
        nb_rev = len(rlog)
        inline = rlog._inline
        data_size = rlog._get_data_offset(nb_rev - 1)

        kind = rlog.target[0]
        revlog_type = b'unknown'
        revlog_target = b''
        if kind == constants.KIND_CHANGELOG:
            revlog_type = b'changelog'
        elif kind == constants.KIND_MANIFESTLOG:
            revlog_type = b'manifest'
            revlog_target = rlog.target[1]
        elif kind == constants.KIND_FILELOG:
            revlog_type = b'file'
            revlog_target = rlog.target[1]

        fm.write(b'revlog.rev-count', b'%9d', nb_rev)
        fm.write(b'revlog.data-size', b'%12d', data_size)

        fm.write(b'revlog.inline', b' %-3s', b'yes' if inline else b'no')
        fm.write(b'revlog.type', b' %-9s', revlog_type)
        fm.write(b'revlog.target', b' %s', revlog_target)

        fm.plain(b'\n')
713
714
def debug_delta_chain(revlog):
    """Yield delta-chain information for every revision of ``revlog``.

    The first yielded value is the header line (bytes).  Each subsequent
    value is one row per revision: a list of
    ``(label, display-format, data-key, value)`` tuples ready to be fed to
    a formatter.  When sparse-read is enabled on the revlog, extra
    read-size/density columns are appended to both the header and the rows.

    NOTE(review): relies on ``constants``, ``nodemod`` and ``deltautil``
    being available at module level — presumably imported by the enclosing
    module; confirm when relocating this code.
    """
    r = revlog
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r.delta_config.general_delta
    withsparseread = r.data_config.with_sparse_read

    # security to avoid crash on corrupted revlogs: every index access below
    # is bounds-checked against this count before being trusted.
    total_revs = len(index)

    # maps rev -> total compressed size of its delta chain; lets revinfo()
    # reuse the size computed for a chain prefix instead of re-walking it.
    chain_size_cache = {}

    def revinfo(rev):
        # Return (p1, p2, compsize, uncompsize, deltatype, chain, chain_size)
        # for one revision, classifying what its delta base is.
        e = index[rev]
        compsize = e[constants.ENTRY_DATA_COMPRESSED_LENGTH]
        uncompsize = e[constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]

        base = e[constants.ENTRY_DELTA_BASE]
        p1 = e[constants.ENTRY_PARENT_1]
        p2 = e[constants.ENTRY_PARENT_2]

        # If the parents of a revision have an empty delta, we never try to
        # delta against that parent, but directly against the delta base of
        # that parent (recursively). It avoids adding a useless entry in the
        # chain.
        #
        # However we need to detect that as a special case for delta-type,
        # that is not simply "other".
        p1_base = p1
        if p1 != nodemod.nullrev and p1 < total_revs:
            e1 = index[p1]
            # walk past empty deltas; stop on self-reference, nullrev or an
            # out-of-range base (corruption guard) to avoid looping forever.
            while e1[constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e1[constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p1_base
                    or new_base == nodemod.nullrev
                    or new_base >= total_revs
                ):
                    break
                p1_base = new_base
                e1 = index[p1_base]
        p2_base = p2
        if p2 != nodemod.nullrev and p2 < total_revs:
            e2 = index[p2]
            # same empty-delta skipping as for p1 above
            while e2[constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e2[constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p2_base
                    or new_base == nodemod.nullrev
                    or new_base >= total_revs
                ):
                    break
                p2_base = new_base
                e2 = index[p2_base]

        # Classify the delta base.  Order matters: direct-parent matches win
        # over the skip1/skip2 (empty-delta-skipping) matches, which win over
        # the snapshot/prev/other fallbacks.
        if generaldelta:
            if base == p1:
                deltatype = b'p1'
            elif base == p2:
                deltatype = b'p2'
            elif base == rev:
                deltatype = b'base'
            elif base == p1_base:
                deltatype = b'skip1'
            elif base == p2_base:
                deltatype = b'skip2'
            elif r.issnapshot(rev):
                deltatype = b'snap'
            elif base == rev - 1:
                deltatype = b'prev'
            else:
                deltatype = b'other'
        else:
            # without general-delta the base is either the revision itself
            # (full text) or the previous revision
            if base == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        chain_size = 0
        # walk the chain from this rev back toward the base; as soon as a
        # cached prefix total is found, add it and stop early.
        for iter_rev in reversed(chain):
            cached = chain_size_cache.get(iter_rev)
            if cached is not None:
                chain_size += cached
                break
            e = index[iter_rev]
            chain_size += e[constants.ENTRY_DATA_COMPRESSED_LENGTH]
        chain_size_cache[rev] = chain_size

        return p1, p2, compsize, uncompsize, deltatype, chain, chain_size

    header = (
        b' rev p1 p2 chain# chainlen prev delta '
        b'size rawsize chainsize ratio lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        header += b' readsize largestblk rddensity srchunks'
    header += b'\n'
    yield header

    # maps chain base rev -> small sequential id, so chains sharing a base
    # are reported under the same chain#
    chainbases = {}
    for rev in r:
        p1, p2, comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        # distance in the data file from the chain base to the end of this
        # rev's data; extradist is the part of that span not occupied by the
        # chain's own deltas.
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # chain has a single element: no previous rev in the chain
            prevrev = -1

        # guard the ratios against division by zero on degenerate entries
        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        # label, display-format, data-key, value
        entry = [
            (b'rev', b'%7d', 'rev', rev),
            (b'p1', b'%7d', 'p1', p1),
            (b'p2', b'%7d', 'p2', p2),
            (b'chainid', b'%7d', 'chainid', chainid),
            (b'chainlen', b'%8d', 'chainlen', len(chain)),
            (b'prevrev', b'%8d', 'prevrev', prevrev),
            (b'deltatype', b'%7s', 'deltatype', deltatype),
            (b'compsize', b'%10d', 'compsize', comp),
            (b'uncompsize', b'%10d', 'uncompsize', uncomp),
            (b'chainsize', b'%10d', 'chainsize', chainsize),
            (b'chainratio', b'%9.5f', 'chainratio', chainratio),
            (b'lindist', b'%9d', 'lindist', lineardist),
            (b'extradist', b'%9d', 'extradist', extradist),
            (b'extraratio', b'%10.5f', 'extraratio', extraratio),
        ]
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            # measure how much data a sparse read of this chain would touch,
            # chunk by chunk as the slicing algorithm would split it
            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            # density of useful chain data within the bytes actually read
            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1
            entry.extend(
                [
                    (b'readsize', b'%10d', 'readsize', readsize),
                    (b'largestblock', b'%10d', 'largestblock', largestblock),
                    (b'readdensity', b'%9.5f', 'readdensity', readdensity),
                    (b'srchunks', b'%8d', 'srchunks', srchunks),
                ]
            )
        yield entry
General Comments 0
You need to be logged in to leave comments. Login now